[Bugfix] Correct zos_copy error resulting when source is a Unix file and destination is a PDSE #327

Merged · 12 commits · Aug 29, 2022
8 changes: 8 additions & 0 deletions changelogs/fragments/327-update-zos_copy-uss-to-pdse.yml
@@ -0,0 +1,8 @@
bugfixes:
- >
zos_copy - module was updated to correct a bug in the case when the
destination (dest) is a PDSE and the source (src) is a Unix System
Services (USS) file. The module would fail to determine whether the PDSE
actually existed and would try to create it when it already existed,
resulting in an error that prevented the module from executing correctly.
(https://github.com/ansible-collections/ibm_zos_core/pull/327)
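To illustrate the failure pattern the fragment describes, here is a minimal, self-contained sketch with stand-in names (broken_exists and allocate are illustrative, not the module's real API):

# Pre-fix behavior in miniature: the existence check misses an
# already-cataloged PDSE, so the follow-up allocation fails and
# aborts the copy.
existing = {"USER.TEST.PDSE"}

def broken_exists(name):
    # Stand-in for the faulty check that reported existing PDSEs as absent.
    return False

def allocate(name):
    if name in existing:
        raise RuntimeError("Data set already exists: " + name)
    existing.add(name)

dest = "USER.TEST.PDSE"
try:
    if not broken_exists(dest):  # wrongly reports "absent"
        allocate(dest)
except RuntimeError as err:
    print("copy aborted:", err)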
87 changes: 49 additions & 38 deletions plugins/modules/zos_copy.py
@@ -1362,7 +1362,7 @@ def copy_to_member(
if rc != 0:
msg = ""
if is_uss_src:
msg = "Unable to copy file {0} to data set member {1}".format(src, dest)
msg = "Unable to copy file {0} to data set member {1}, {2}, {3}".format(src, dest, temp_path, new_src)
else:
msg = "Unable to copy data set member {0} to {1}".format(src, dest)

@@ -1407,40 +1407,47 @@ def create_pdse(
remote_src {bool} -- Whether source is located on remote system.
(Default {False})
src_vol {str} -- Volume where source data set is stored. (Default {None})
"""
rc = out = err = None
if remote_src:
if src_ds_type in MVS_PARTITIONED:
rc = self.allocate_model(dest_name, src, vol=alloc_vol)

elif src_ds_type in MVS_SEQ:
rc = self._allocate_pdse(
dest_name, src_vol=src_vol, src=src, alloc_vol=alloc_vol
)

elif os.path.isfile(src):
size = os.stat(src).st_size
rc = self._allocate_pdse(dest_name, size=size)
Returns:
{bool} -- True if the PDSE was created, False if it was already present
"""
changed = False

elif os.path.isdir(src):
path, dirs, files = next(os.walk(src))
if dirs:
self.fail_json(
msg="Subdirectory found in source directory {0}".format(src)
try:
if remote_src:
if src_ds_type in MVS_PARTITIONED:
# Failure of this function is already addressed inside of it.
self.allocate_model(dest_name, src, vol=alloc_vol)

elif src_ds_type in MVS_SEQ:
changed = self._allocate_pdse(
dest_name, src_vol=src_vol, src=src, alloc_vol=alloc_vol
)
size = sum(os.stat(path + "/" + f).st_size for f in files)
rc = self._allocate_pdse(dest_name, size=size)
else:
rc = self._allocate_pdse(dest_name, src=src, size=size, alloc_vol=alloc_vol)

if rc != 0:
elif os.path.isfile(src):
size = os.stat(src).st_size
changed = self._allocate_pdse(dest_name, size=size)

elif os.path.isdir(src):
path, dirs, files = next(os.walk(src))
if dirs:
self.fail_json(
msg="Subdirectory found in source directory {0}".format(src)
)
size = sum(os.stat(path + "/" + f).st_size for f in files)
changed = self._allocate_pdse(dest_name, size=size)
else:
changed = self._allocate_pdse(dest_name, src=src, size=size, alloc_vol=alloc_vol)

return changed
except data_set.DatasetCreateError as e:
self.fail_json(
msg="Unable to allocate destination data set {0} to receive {1}".format(dest_name, src),
stdout=out,
stderr=err,
rc=rc,
stdout_lines=out.splitlines() if out else None,
stderr_lines=err.splitlines() if err else None,
stdout=None,
stderr=e.msg,
rc=e.rc,
stdout_lines=None,
stderr_lines=e.msg.splitlines() if e.msg else None,
)

def _allocate_pdse(
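The hunk above replaces raw return-code plumbing with a raised exception that fail_json consumes. A minimal sketch of that pattern, where DatasetCreateError is a stand-in class matching only the .rc and .msg attributes the except block uses:

class DatasetCreateError(Exception):
    # Stand-in carrying the two attributes the except block above reads.
    def __init__(self, rc, msg):
        super().__init__(msg)
        self.rc = rc
        self.msg = msg

def allocate(name):
    # Illustrative failure; message text is made up for the sketch.
    raise DatasetCreateError(12, "allocation of {0} failed".format(name))

try:
    allocate("USER.TEST.PDSE")
except DatasetCreateError as e:
    failure = dict(
        msg="Unable to allocate destination data set USER.TEST.PDSE",
        rc=e.rc,
        stderr=e.msg,
        stderr_lines=e.msg.splitlines() if e.msg else None,
    )
    print(failure)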
@@ -1464,6 +1471,9 @@ def _allocate_pdse(
src {str} -- The name of the source data set from which to get the size
src_vol {str} -- Volume of the source data set
allc_vol {str} -- The volume where PDSE should be allocated

Returns:
{bool} -- True if the PDSE was created, False if it was already present
"""
rc = -1
recfm = "FB"
@@ -1482,21 +1492,21 @@
else:
alloc_size = 5242880 # Use the default 5 Megabytes

alloc_size = "{0}K".format(str(int(math.ceil(alloc_size / 1024))))
alloc_size = int(math.ceil(alloc_size / 1024))
parms = dict(
name=ds_name,
replace=False,
type="PDSE",
primary_space=alloc_size,
space_primary=alloc_size,
space_type="K",
record_format=recfm,
record_length=lrecl
)
if alloc_vol:
parms['volume'] = alloc_vol
parms['volumes'] = alloc_vol

response = datasets._create(**parms)
rc = response.rc
changed = data_set.DataSet.ensure_present(ds_name, **parms)

return rc
return changed


def backup_data(ds_name, ds_type, backup_name):
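The switch above from datasets._create plus a return code to data_set.DataSet.ensure_present plus a changed flag relies on an idempotent contract. A minimal sketch of that contract under assumed semantics (the helper below is illustrative, not the collection's real API):

def ensure_present(name, catalog, **alloc_parms):
    # Allocate only when absent; report whether anything changed.
    if name in catalog:
        return False              # already present: nothing to do
    catalog[name] = alloc_parms   # stand-in for the real allocation
    return True                   # created: report a change

catalog = {}
print(ensure_present("USER.TEST.PDSE", catalog, type="PDSE", space_primary=5120, space_type="K"))  # True
print(ensure_present("USER.TEST.PDSE", catalog, type="PDSE", space_primary=5120, space_type="K"))  # False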
@@ -1739,10 +1749,11 @@ def run_module(module, arg_def):
dest_exists = os.path.exists(dest)
else:
dest_du = data_set.DataSetUtils(dest_name)
dest_exists = dest_du.exists()
dest_exists = data_set.DataSet.data_set_exists(dest_name, volume)
if copy_member:
dest_exists = dest_exists and dest_du.member_exists(dest_member)
dest_exists = dest_exists and data_set.DataSet.data_set_member_exists(dest)
dest_ds_type = dest_du.ds_type()

if temp_path or "/" in src:
src_ds_type = "USS"
else:
70 changes: 69 additions & 1 deletion tests/dependencyfinder.py
@@ -23,6 +23,22 @@


class ArtifactManager(object):
"""
Dependency analyzer will review modules and action plugin changes and then
discover which tests should be run. It addition to mapping a test suite,
whether it is functional or unit, it will also see if the module/plugin
is used in test cases. In the even a module is used in another test suite
unrelated to the modules test suite, it will also be returned. This ensures
that a module changes don't break test suites dependent on a module.

Usage (minimal) example:
python dependencyfinder.py -p .. -b origin/dev -m

Note: It is possible that only test cases are modified only, without a module
or modules, in that case without a module pairing no test cases will be
returned. Its best to run full regression in that case until this can be
updated to support detecting only test cases.
"""
artifacts = []

def __init__(self, artifacts=None):
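As a rough illustration of the pairing the docstring describes (file names below are hypothetical, and this is not ArtifactManager's actual algorithm), a changed module selects every suite that references it, including suites belonging to other modules:

changed = ["plugins/modules/zos_copy.py"]
suites = {
    "tests/functional/modules/test_zos_copy_func.py": {"zos_copy"},
    "tests/functional/modules/test_zos_fetch_func.py": {"zos_fetch", "zos_copy"},
    "tests/functional/modules/test_zos_job_submit_func.py": {"zos_job_submit"},
}
changed_names = {path.rsplit("/", 1)[-1][:-3] for path in changed}  # {"zos_copy"}
selected = sorted(s for s, refs in suites.items() if refs & changed_names)
print(selected)  # the zos_copy and zos_fetch suites both run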
@@ -198,6 +214,14 @@ def __init__(self, name, source, path, dependencies=None):
if dependencies:
self.dependencies = dependencies

def __str__(self):
"""
Return the Artifact instance variables in a readable format.
"""
return "name: {0},\nsource: {1},\npath: {2}\n".format(self.name,
self.source,
self.path)

@classmethod
def from_path(cls, path):
"""Instantiate an Artifact based on provided path.
@@ -473,6 +497,39 @@ def get_changed_files(path, branch="origin/dev"):
return changed_files


def get_changed_plugins(path, branch="origin/dev"):
"""Get a list of modules or plugins in a specific branch.

Args:
branch (str, optional): The branch to compare to. Defaults to "origin/dev".

Raises:
RuntimeError: When git request-pull fails.

Returns:
list[str]: A list of changed file paths.
"""
changed_plugins_modules = []
get_diff_pr = subprocess.Popen(
["git", "request-pull", branch, "./"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=path,
)

stdout, stderr = get_diff_pr.communicate()
stdout = stdout.decode("utf-8")

if get_diff_pr.returncode > 0:
raise RuntimeError("Could not acquire change list")
if stdout:
for line in stdout.split("\n"):
if "plugins/action/" in line or "plugins/modules/" in line:
changed_plugins_modules.append(line.split("|", 1)[0].strip())

return changed_plugins_modules
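The loop above assumes git request-pull output that ends with a diffstat, one "path | N +-" row per changed file; a quick sketch of the split it performs (the sample line is illustrative):

sample = " plugins/modules/zos_copy.py | 87 +++++++++----------"
path = sample.split("|", 1)[0].strip()
print(path)  # plugins/modules/zos_copy.py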


def parse_arguments():
"""Parse and return command line arguments.

@@ -511,6 +568,14 @@ def parse_arguments():
action="store_true",
help="Print one test per line to stdout. Default behavior prints all tests on same line.",
)
parser.add_argument(
"-m",
"--minimum",
required=False,
action="store_true",
default=False,
help="Detect only the changes from the branch request-pull.",
)
args = parser.parse_args()
return args
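For reference, example invocations mirroring the docstring's usage line (repository layout assumed):

python dependencyfinder.py -p .. -b origin/dev        # consider every changed file
python dependencyfinder.py -p .. -b origin/dev -m     # only changed modules/action plugins via request-pull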

@@ -525,7 +590,10 @@ def parse_arguments():
artifacts = build_artifacts_from_collection(args.path)
all_artifact_manager = ArtifactManager(artifacts)

changed_files = get_changed_files(args.path, args.branch)
if args.minimum:
changed_files = get_changed_plugins(args.path, args.branch)
else:
changed_files = get_changed_files(args.path, args.branch)

changed_artifacts = []
for file in changed_files: