diff --git a/.bandit.baseline b/.bandit.baseline new file mode 100644 index 00000000..aa9c3fed --- /dev/null +++ b/.bandit.baseline @@ -0,0 +1,210 @@ +{ + "errors": [], + "generated_at": "2024-05-20T09:55:19Z", + "metrics": { + "./__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 0, + "nosec": 0, + "skipped_tests": 0 + }, + "./docs/ansible-doc-extractor-collections.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 18, + "nosec": 0, + "skipped_tests": 0 + }, + "./docs/source/conf.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 11, + "nosec": 0, + "skipped_tests": 0 + }, + "./plugins/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 0, + "nosec": 0, + "skipped_tests": 0 + }, + "./plugins/doc_fragments/cmci.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 259, + "nosec": 0, + "skipped_tests": 0 + }, + "./plugins/module_utils/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 0, + "nosec": 0, + "skipped_tests": 0 + }, + "./plugins/module_utils/cmci.py": { + "CONFIDENCE.HIGH": 0, + 
"CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 1, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 1, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 633, + "nosec": 0, + "skipped_tests": 0 + }, + "./plugins/modules/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 0, + "nosec": 0, + "skipped_tests": 0 + }, + "./plugins/modules/cmci_action.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 505, + "nosec": 0, + "skipped_tests": 0 + }, + "./plugins/modules/cmci_create.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 468, + "nosec": 0, + "skipped_tests": 0 + }, + "./plugins/modules/cmci_delete.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 447, + "nosec": 0, + "skipped_tests": 0 + }, + "./plugins/modules/cmci_get.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 535, + "nosec": 0, + "skipped_tests": 0 + }, + "./plugins/modules/cmci_update.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 477, + "nosec": 0, + "skipped_tests": 0 + }, + "_totals": { + "CONFIDENCE.HIGH": 0, + 
"CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 1, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 1, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 3353, + "nosec": 0, + "skipped_tests": 0 + } + }, + "results": [ + { + "code": "35 CMCI_USER = 'cmci_user'\n36 CMCI_PASSWORD = 'cmci_password'\n37 CMCI_CERT = 'cmci_cert'\n", + "col_offset": 16, + "end_col_offset": 31, + "filename": "./plugins/module_utils/cmci.py", + "issue_confidence": "MEDIUM", + "issue_cwe": { + "id": 259, + "link": "https://cwe.mitre.org/data/definitions/259.html" + }, + "issue_severity": "LOW", + "issue_text": "Possible hardcoded password: 'cmci_password'", + "line_number": 36, + "line_range": [ + 36 + ], + "more_info": "https://bandit.readthedocs.io/en/1.7.8/plugins/b105_hardcoded_password_string.html", + "test_id": "B105", + "test_name": "hardcoded_password_string" + } + ] +} \ No newline at end of file diff --git a/.gitignore b/.gitignore index a7049dc8..872022ec 100644 --- a/.gitignore +++ b/.gitignore @@ -5,7 +5,12 @@ docs/build/ __pycache__ venv* .venv +core tests/output ibm-ibm_zos_cics-*.tar.gz tests/integration/inventory -tests/integration/variables/cmci.yml \ No newline at end of file +tests/integration/inventory_zos.yml +tests/integration/variables/cmci.yml +tests/integration/variables/provisioning.yml +tests/integration/variables/utilities.yml +tests/integration/variables/zos.yml diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 609cd417..c55d66e4 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,6 +5,47 @@ ibm.ibm_zos_cics Release Notes .. contents:: Topics +v2.1.0 +====== + +Release Summary +--------------- + +General Availability of CICS provisioning modules. You can use these Ansible modules to create automation tasks that provision or deprovision, and start or stop a CICS region. Sample playbooks show you how to do this with the latest version of the Ansible IBM z/OS CICS collection. 
All modules were initially released with Version 1.1.0-beta as noted below. Subsequent Version 1.1.0-beta releases may include enhancements and bugfixes for these modules. Refer to the What's new of Version 1.1.0-beta releases for details. +This release replaces all the previous 1.1.0-beta* releases. +You can use the following modules for provisioning and managing CICS TS data sets +``aux_temp_storage`` for the CICS auxiliary temporary storage data set. This module was initially released as ``auxiliary_temp`` with Version 1.1.0-beta.4. The module is changed to ``aux_temp_storage`` in Version 2.1.0. +``aux_trace`` for the CICS auxiliary trace data sets. This module was initially released as ``trace`` with Version 1.1.0-beta.4. The module is changed to ``aux_trace`` in Version 2.1.0. +``csd`` for the CICS system definition data set. This module was initially released with Version 1.1.0-beta.4. +``global_catalog`` for the CICS global catalog data set. This module was initially released with Version 1.1.0-beta.4. +``local_request_queue`` for the CICS local request queue data set. This module was initially released with Version 1.1.0-beta.3. +``td_intrapartition`` for the CICS transient data intrapartition data set. This module was initially released as ``intrapartition`` with Version 1.1.0-beta.4. The module is changed to ``td_intrapartition`` in Version 2.1.0. +``transaction_dump`` for the CICS transaction dump data sets. This module was initially released with Version 1.1.0-beta.4. +You can use the following modules for CICS startup and shutdown operations +``region_jcl`` - Create a CICS startup JCL data set. This module replaces ``start_cics``, which was released with Version 1.1.0-beta.5. ``region_jcl`` is significantly different from ``start_cics`` in function. ``region_jcl`` creates a data set that contains the startup JCL, but doesn't perform the actual startup processing. 
``region_jcl`` also supports definition and allocation of user data sets with the ``user_data_sets`` parameter. +``stop_region`` - Stop a CICS region. This module was initially released as ``stop_cics`` with Version 1.1.0-beta.5. The module is changed to ``stop_region`` in Version 2.1.0. In Version 2.1.0, ``stop_region`` supports a new input parameter, ``job_name`` so that you can use the job name, which is typically the CICS's APPLID, to identify a running CICS region. +The group name for the CICS provisioning modules is ``region``. However, in the Version 1.1.0-beta releases, the group name was ``region_group``. +CICS provisioning modules provide support for all in-service CICS TS releases including the latest CICS TS 6.2. + +Deprecated Features +------------------- + +- The group name for the CMCI modules is changed to ``cmci`` instead of ``cmci_group``. ``cmci_group`` is deprecated. + +New Modules +----------- + +- ibm.ibm_zos_cics.aux_temp_storage - Create and remove the CICS auxiliary temporary storage data set +- ibm.ibm_zos_cics.aux_trace - Allocate auxiliary trace data sets +- ibm.ibm_zos_cics.csd - Create, remove, and manage the CICS CSD +- ibm.ibm_zos_cics.global_catalog - Create, remove, and manage the CICS global catalog +- ibm.ibm_zos_cics.local_catalog - Create, remove, and manage the CICS local catalog +- ibm.ibm_zos_cics.local_request_queue - Create and remove the CICS local request queue +- ibm.ibm_zos_cics.region_jcl - Create CICS startup JCL data set +- ibm.ibm_zos_cics.stop_region - Stop a CICS region +- ibm.ibm_zos_cics.td_intrapartition - Create and remove the CICS transient data intrapartition data set +- ibm.ibm_zos_cics.transaction_dump - Allocate transaction dump data sets + v2.0.0 ====== diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index b7e43353..6fd83264 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,4 +1,4 @@ -# (c) Copyright IBM Corp. 2020,2021 +# (c) Copyright IBM Corp. 
2020,2023 # Developer guide ### Licensing diff --git a/README.md b/README.md index f4778816..5baefcad 100644 --- a/README.md +++ b/README.md @@ -3,7 +3,7 @@ The **IBM® z/OS® CICS® collection**, also represented as **ibm\_zos\_cics** in this document, is part of the broader initiative to bring Ansible Automation to IBM Z® through the offering **Red Hat® Ansible Certified Content for IBM Z®**. The **IBM z/OS CICS collection** supports management of CICS -resources and definitions via the CMCI REST API provided by CICS. +resources and definitions through the CMCI REST API provided by CICS as well as provisioning of standalone CICS regions. This CICS collection works in conjunction with other Ansible collections for IBM Z, such as the [IBM z/OS core collection](https://github.com/ansible-collections/ibm_zos_core). @@ -38,7 +38,11 @@ For guides and reference, please review the [documentation](https://ibm.github.i The IBM CICS collection includes [modules](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_cics/docs/source/modules.html), [sample playbooks](https://github.com/IBM/z_ansible_collections_samples), -and ansible-doc to automate tasks in CICS. +and ansible-doc to: + +- Automate tasks in CICS. +- Provision or deprovision CICS regions. +- Start or stop a CICS region. 
## Contributing diff --git a/changelogs/.plugin-cache.yaml b/changelogs/.plugin-cache.yaml index 29ac4bd6..5271f110 100644 --- a/changelogs/.plugin-cache.yaml +++ b/changelogs/.plugin-cache.yaml @@ -11,6 +11,16 @@ plugins: inventory: {} lookup: {} module: + aux_temp_storage: + description: Create and remove the CICS auxiliary temporary storage data set + name: aux_temp_storage + namespace: '' + version_added: 2.1.0 + aux_trace: + description: Allocate auxiliary trace data sets + name: aux_trace + namespace: '' + version_added: 2.1.0 cmci_action: description: Perform actions on CICS and CICSPlex SM resources name: cmci_action @@ -36,9 +46,49 @@ plugins: name: cmci_update namespace: '' version_added: 1.0.0 + csd: + description: Create, remove, and manage the CICS CSD + name: csd + namespace: '' + version_added: 2.1.0 + global_catalog: + description: Create, remove, and manage the CICS global catalog + name: global_catalog + namespace: '' + version_added: 2.1.0 + local_catalog: + description: Create, remove, and manage the CICS local catalog + name: local_catalog + namespace: '' + version_added: 2.1.0 + local_request_queue: + description: Create and remove the CICS local request queue + name: local_request_queue + namespace: '' + version_added: 2.1.0 + region_jcl: + description: Create CICS startup JCL data set + name: region_jcl + namespace: '' + version_added: 2.1.0 + stop_region: + description: Stop a CICS region + name: stop_region + namespace: '' + version_added: 2.1.0 + td_intrapartition: + description: Create and remove the CICS transient data intrapartition data set + name: td_intrapartition + namespace: '' + version_added: 2.1.0 + transaction_dump: + description: Allocate transaction dump data sets + name: transaction_dump + namespace: '' + version_added: 2.1.0 netconf: {} shell: {} strategy: {} test: {} vars: {} -version: 2.0.0 +version: 2.1.0 diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index 522316f3..e6198bd7 100644 --- 
a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -85,3 +85,99 @@ releases: fragments: - 2.0.0.yml release_date: '2024-05-08' + 2.1.0: + changes: + deprecated_features: + - The group name for the CMCI modules is changed to ``cmci`` instead of ``cmci_group``. + ``cmci_group`` is deprecated. + release_summary: 'General Availability of CICS provisioning modules. You can + use these Ansible modules to create automation tasks that provision or deprovision, + and start or stop a CICS region. Sample playbooks show you how to do this + with the latest version of the Ansible IBM z/OS CICS collection. All modules + were initially released with Version 1.1.0-beta as noted below. Subsequent + Version 1.1.0-beta releases may include enhancements and bugfixes for these + modules. Refer to the What''s new of Version 1.1.0-beta releases for details. + + This release replaces all the previous 1.1.0-beta* releases. + + You can use the following modules for provisioning and managing CICS TS data + sets + + ``aux_temp_storage`` for the CICS auxiliary temporary storage data set. This + module was initially released as ``auxiliary_temp`` with Version 1.1.0-beta.4. + The module is changed to ``aux_temp_storage`` in Version 2.1.0. + + ``aux_trace`` for the CICS auxiliary trace data sets. This module was initially + released as ``trace`` with Version 1.1.0-beta.4. The module is changed to + ``aux_trace`` in Version 2.1.0. + + ``csd`` for the CICS system definition data set. This module was initially + released with Version 1.1.0-beta.4. + + ``global_catalog`` for the CICS global catalog data set. This module was initially + released with Version 1.1.0-beta.4. + + ``local_request_queue`` for the CICS local request queue data set. This module + was initially released with Version 1.1.0-beta.3. + + ``td_intrapartition`` for the CICS transient data intrapartition data set. + This module was initially released as ``intrapartition`` with Version 1.1.0-beta.4. 
+ The module is changed to ``td_intrapartition`` in Version 2.1.0. + + ``transaction_dump`` for the CICS transaction dump data sets. This module + was initially released with Version 1.1.0-beta.4. + + You can use the following modules for CICS startup and shutdown operations + + ``region_jcl`` - Create a CICS startup JCL data set. This module replaces + ``start_cics``, which was released with Version 1.1.0-beta.5. ``region_jcl`` + is significantly different from ``start_cics`` in function. ``region_jcl`` + creates a data set that contains the startup JCL, but doesn''t perform the + actual startup processing. ``region_jcl`` also supports definition and allocation + of user data sets with the ``user_data_sets`` parameter. + + ``stop_region`` - Stop a CICS region. This module was initially released as + ``stop_cics`` with Version 1.1.0-beta.5. The module is changed to ``stop_region`` + in Version 2.1.0. In Version 2.1.0, ``stop_region`` supports a new input parameter, + ``job_name`` so that you can use the job name, which is typically the CICS''s + APPLID, to identify a running CICS region. + + The group name for the CICS provisioning modules is ``region``. However, in + the Version 1.1.0-beta releases, the group name was ``region_group``. + + CICS provisioning modules provide support for all in-service CICS TS releases + including the latest CICS TS 6.2.' 
+ fragments: + - 2.1.0.yml + modules: + - description: Create and remove the CICS auxiliary temporary storage data set + name: aux_temp_storage + namespace: '' + - description: Allocate auxiliary trace data sets + name: aux_trace + namespace: '' + - description: Create, remove, and manage the CICS CSD + name: csd + namespace: '' + - description: Create, remove, and manage the CICS global catalog + name: global_catalog + namespace: '' + - description: Create, remove, and manage the CICS local catalog + name: local_catalog + namespace: '' + - description: Create and remove the CICS local request queue + name: local_request_queue + namespace: '' + - description: Create CICS startup JCL data set + name: region_jcl + namespace: '' + - description: Stop a CICS region + name: stop_region + namespace: '' + - description: Create and remove the CICS transient data intrapartition data set + name: td_intrapartition + namespace: '' + - description: Allocate transaction dump data sets + name: transaction_dump + namespace: '' + release_date: '2024-06-12' diff --git a/dev-requirements.txt b/dev-requirements.txt index 5f6b3c2b..d6ea74a6 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -13,6 +13,6 @@ shellcheck-py==0.9.0.5 rstcheck==6.1.2 yamllint==1.32.0 voluptuous==0.13.1 -ansible-lint==6.22.0 +ansible-lint==6.22.1 pycodestyle==2.10.0 bandit==1.7.8 diff --git a/doc-requirements.txt b/doc-requirements.txt index 56e66b52..76eea0af 100644 --- a/doc-requirements.txt +++ b/doc-requirements.txt @@ -1,7 +1,7 @@ -# (c) Copyright IBM Corp. 2020,2023 +# (c) Copyright IBM Corp. 
2020,2024 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) -r dev-requirements.txt ansible-doc-extractor==0.1.11 Sphinx==6.2.1 -sphinx-rtd-theme==1.2.2 \ No newline at end of file +sphinx-rtd-theme==1.2.2 diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 00000000..56b999e1 --- /dev/null +++ b/docs/README.md @@ -0,0 +1,5 @@ +Docs generated under source/modules are automatically generated. +Other .rst files have to be manually updated +If you are running in a devcontainer, you can run 'update-docs.py' to generate updates to the modules .rst documents +Otherwise, you will need to run 'pip install -r /workspace/collections/ansible_collections/ibm/ibm_zos_cics/doc-requirements.txt' +Then run, 'apt-get updates' followed by 'apt-get install make' before you can run 'update-docs.py' \ No newline at end of file diff --git a/docs/ansible-doc-extractor-collections.py b/docs/ansible-doc-extractor-collections.py index ed2a27f8..b4affab3 100644 --- a/docs/ansible-doc-extractor-collections.py +++ b/docs/ansible-doc-extractor-collections.py @@ -1,4 +1,4 @@ -# (c) Copyright IBM Corp. 2020,2021 +# (c) Copyright IBM Corp. 2020,2024 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) from __future__ import absolute_import, division, print_function __metaclass__ = type @@ -7,7 +7,7 @@ import os # This needs to be set before the imports get evaluated! 
-os.environ['ANSIBLE_COLLECTIONS_PATHS'] = os.path.abspath('../../../..') +os.environ['ANSIBLE_COLLECTIONS_PATH'] = os.path.abspath('../../../..') from ansible_doc_extractor.cli import render_docs @@ -22,4 +22,4 @@ ) ) ) - sys.exit(render_docs('source/modules', modules, open('templates/module.rst.j2'))) + sys.exit(render_docs('source/modules', modules, open('templates/module.rst.j2'), False)) diff --git a/docs/ansible_content.rst b/docs/ansible_content.rst index 5de60ad7..227d8570 100644 --- a/docs/ansible_content.rst +++ b/docs/ansible_content.rst @@ -1,5 +1,5 @@ .. ........................................................................... -.. © Copyright IBM Corporation 2020,2021 . +.. © Copyright IBM Corporation 2020,2024 . .. ........................................................................... ========= @@ -7,18 +7,22 @@ z/OS CICS ========= The **IBM® z/OS® CICS® collection**, also represented as -ibm_zos_cics in this document, is part of the broader +ibm_zos_cics in this document, is part of the broader initiative to bring Ansible® Automation to IBM Z® through the offering **Red Hat® Ansible Certified Content for IBM Z**. -The **IBM® z/OS® CICS® collection** supports automation tasks that can -define, install, and perform actions on CICS definitions and resources such as -creating a PROGRAM definition, installing and updating it, and deleting the -definition. +The **IBM® z/OS® CICS® collection** provides modules for automation tasks that +perform operations on CICS and CICSPlex SM resources and definitions, for example, +creating and installing a PROGRAM definition, then updating or deleting the definition. +These modules interact with the `CMCI REST API`_ of the CICS® management client +interface (CMCI) for system management. -The Ansible modules in this collection are written in Python and interact with -the `CMCI REST API`_ of the CICS® management client interface (CMCI) for system -management. 
+The **IBM® z/OS® CICS® collection** also provides modules for provisioning and managing +CICS TS data sets and utilities. You can use these Ansible modules to create automation +tasks that provision or deprovision a CICS region and tasks for CICS startup and +shutdown. + +The Ansible modules in this collection are written in Python. .. _CMCI REST API: https://www.ibm.com/docs/en/cics-ts/latest?topic=cmci-how-it-works-rest-api diff --git a/docs/source/.gitignore b/docs/source/.gitignore deleted file mode 100644 index dc428f0e..00000000 --- a/docs/source/.gitignore +++ /dev/null @@ -1 +0,0 @@ -modules \ No newline at end of file diff --git a/docs/source/conf.py b/docs/source/conf.py index ad44cab6..e46add37 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -1,4 +1,4 @@ -# (c) Copyright IBM Corp. 2020 +# (c) Copyright IBM Corp. 2020,2024 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) # Configuration file for the Sphinx documentation builder. # @@ -20,11 +20,11 @@ # -- Project information ----------------------------------------------------- project = 'IBM Z CICS Collection' -copyright = '2020, IBM' +copyright = '2024, IBM' author = 'IBM' # The full version, including alpha/beta/rc tags -release = '2.0.0' +release = '2.1.0' # -- General configuration --------------------------------------------------- diff --git a/docs/source/index.rst b/docs/source/index.rst index 623c3014..ae88ea66 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -44,7 +44,7 @@ The IBM z/OS CICS collection includes `modules`_ and ansible-doc to automate tas Copyright ========= -© Copyright IBM Corporation 2020 +© Copyright IBM Corporation 2023 License ======= diff --git a/docs/source/installation.rst b/docs/source/installation.rst index fa2eebf2..e09ba5f9 100644 --- a/docs/source/installation.rst +++ b/docs/source/installation.rst @@ -5,14 +5,14 @@ Installation ============ -Always check that your control node has fulfilled the 
:doc:`requirements` before installing the **IBM® z/OS® CICS® collection**. +Always check that your control node has met the :doc:`requirements` before you install the **IBM® z/OS® CICS® collection**. -Then, follow the guidance to install the collection from Ansible® Galaxy or a custom Galaxy server. More ways to install an Ansible collection are documented at `installing collections`_. +Then, follow the guidance to install the collection from Ansible® Galaxy or from a custom Galaxy server. More ways to install an Ansible collection are documented in `Installing collections`_. Installing from Ansible Galaxy ------------------------------ -This is the quickest way to install the CICS collection. From your CLI, enter: +This is the quickest way to install the CICS collection. From your CLI, enter the following command: .. code-block:: sh @@ -38,7 +38,7 @@ By default, collections are installed in ``~/.ansible/collections``. After insta -To install with customization, such as specifying another installation path or using a playbook, see `installing collections`_. +To install with customization, such as specifying another installation path or using a playbook, see `Installing collections`_. .. _installing collections: https://docs.ansible.com/ansible/latest/user_guide/collections_using.html#installing-collections-with-ansible-galaxy @@ -57,7 +57,7 @@ Ansible searches for ``ansible.cfg`` in the following locations in this order: * ~/.ansible.cfg (in the home directory) * /etc/ansible/ansible.cfg -Instructions on how to configure the server list in ``ansible.cfg`` can be found at `configuring the ansible-galaxy client`_. Available options in the Ansible configuration file can be found at `Ansible Configuration Settings`_. +Instructions on how to configure the server list in ``ansible.cfg`` can be found in `Configuring the ansible-galaxy client`_. Available options in the Ansible configuration file can be found in `Ansible Configuration Settings`_. .. 
note:: When hosting a private Galaxy server, available content is not always consistent with what is available on the community Galaxy server. diff --git a/docs/source/modules.rst b/docs/source/modules.rst index 91743875..68863a68 100644 --- a/docs/source/modules.rst +++ b/docs/source/modules.rst @@ -9,6 +9,77 @@ Modules Modules can be used in a playbook to automate tasks. Ansible® executes each module on the target node and returns the result back to the controller. +The **IBM® z/OS® CICS® collection** provides two categories of modules: + +* Modules for working with CICS and CICSPlex SM resources and definitions. + These modules interact with CMCI over an HTTP connection by leveraging + the `CMCI REST API`_. These modules are collectively referred to as + **CMCI modules** in documentation. +* Modules for provisioning and deprovisioning of CICS TS regions and for + CICS startup and shutdown operations. These modules are collectively + referred to as CICS **provisioning modules** in documentation. + +These modules have different requirements of the managed node. For details, see :doc:`requirements_managed`. + +.. _CMCI REST API: + https://www.ibm.com/docs/en/cics-ts/latest?topic=cmci-how-it-works-rest-api + + +Using Defaults Groups in CICS Provisioning Modules +--------------- + +The CICS provisioning modules use several defaults groups. In particular, these two defaults groups are used for specific purposes: + +* ``cics_data_sets`` can be used to specify the location of a CICS installation. +* ``region_data_sets`` can be used to specify a high level qualifier for the data sets used by a single CICS region. + +The example below shows how to use these default groups within the **global_catalog** module. + +.. 
code-block:: yaml+jinja + + + - name: Initialize a global catalog + ibm.ibm_zos_cics.global_catalog: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + state: "initial" + + +In the above example, the global catalog is created at the data set location of ``REGIONS.ABCD0001.DFHGCD``, +and the CICS load libraries can be found at ``CICSTS61.CICS``, which means that the SDFHLOAD library can be +found at ``CICSTS61.CICS.SDFHLOAD``. + +These groups can be placed in a `module_defaults`_ section, which means that all +the CICS provisioning modules use the same high level qualifier for the +region data sets, and the location of the CICS installation only has to be +declared once for all the modules. + +To override the data set location or name for a specific task, you can provide an +additional parameter to the ``region_data_sets`` group as shown in the example +for a global catalog data set below. + +.. code-block:: yaml+jinja + + + - name: Initialize a global catalog with a custom name + ibm.ibm_zos_cics.global_catalog: + region_data_sets: + dfhgcd: + dsn: "MY.CICS.GLOBAL.CATALOG" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + state: "initial" + + +.. _module_defaults: + https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_module_defaults.html + + +Module Reference +--------------- + The **IBM® z/OS® CICS® collection** contains these modules. For each module, the accepted parameters, return values, and examples are provided in the documentation. diff --git a/docs/source/modules/aux_temp_storage.rst b/docs/source/modules/aux_temp_storage.rst new file mode 100644 index 00000000..92dff8d3 --- /dev/null +++ b/docs/source/modules/aux_temp_storage.rst @@ -0,0 +1,330 @@ +.. ............................................................................... +.. © Copyright IBM Corporation 2020,2023 . +.. 
Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) . +.. ............................................................................... + +:github_url: https://github.com/ansible-collections/ibm_zos_cics/blob/main/plugins/modules/aux_temp_storage.py + +.. _aux_temp_storage_module: + + +aux_temp_storage -- Create and remove the CICS auxiliary temporary storage data set +=================================================================================== + + + +.. contents:: + :local: + :depth: 1 + + +Synopsis +-------- +- Create and remove the \ `auxiliary temporary storage `__\ data set used by a CICS® region. +- You can use this module when provisioning or de-provisioning a CICS region. +- Use the \ :literal:`state`\ option to specify the intended state for the auxiliary temporary storage data set. For example, use \ :literal:`state=initial`\ to create an auxiliary temporary storage data set if it doesn't exist. + + + + + +Parameters +---------- + + + +region_data_sets + The location of the region data sets to be created by using a template, for example, \ :literal:`REGIONS.ABCD0001.\<\< data\_set\_name \>\>`\ . + + If you want to use a data set that already exists, ensure that the data set is an auxiliary temporary storage data set. + + + | **required**: True + | **type**: dict + + + + dfhtemp + Overrides the templated location for the auxiliary temporary storage data set. + + + | **required**: False + | **type**: dict + + + + dsn + The data set name of the auxiliary temporary storage to override the template. + + + | **required**: False + | **type**: str + + + + + template + The base location of the region data sets with a template. + + + | **required**: False + | **type**: str + + + + +space_primary + The size of the primary space allocated to the auxiliary temporary storage data set. Note that this is just the value; the unit is specified with \ :literal:`space\_type`\ . 
+ + This option takes effect only when the auxiliary temporary storage data set is being created. If the data set already exists, the option has no effect. + + + | **required**: False + | **type**: int + | **default**: 200 + + + +space_secondary + The size of the secondary space allocated to the auxiliary temporary storage data set. Note that this is just the value; the unit is specified with \ :literal:`space\_type`\ . + + This option takes effect only when the auxiliary temporary storage data set is being created. If the data set already exists, the option has no effect. + + + | **required**: False + | **type**: int + | **default**: 10 + + + +space_type + The unit portion of the auxiliary temporary storage data set size. Note that this is just the unit; the value for the primary space is specified with \ :literal:`space\_primary`\ and the value for the secondary space is specified with \ :literal:`space\_secondary`\ . + + This option takes effect only when the auxiliary temporary storage data set is being created. If the data set already exists, the option has no effect. + + The size can be specified in megabytes (\ :literal:`M`\ ), kilobytes (\ :literal:`K`\ ), records (\ :literal:`REC`\ ), cylinders (\ :literal:`CYL`\ ), or tracks (\ :literal:`TRK`\ ). + + + | **required**: False + | **type**: str + | **default**: REC + | **choices**: M, K, REC, CYL, TRK + + + +state + The intended state for the auxiliary temporary storage data set, which the module aims to achieve. + + Specify \ :literal:`absent`\ to remove the auxiliary temporary storage data set entirely, if it already exists. + + Specify \ :literal:`initial`\ to create the auxiliary temporary storage data set, if it does not exist. If the specified data set exists but is empty, the module leaves the data set as is. If the specified data set exists and has contents, the module deletes the data set and then creates a new, empty data set. 
+ + Specify \ :literal:`warm`\ to retain an existing auxiliary temporary storage data set in its current state. The module checks whether the specified data set exists, and if it does, leaves the data set as is. If the data set does not exist, the operation fails. + + + | **required**: True + | **type**: str + | **choices**: initial, absent, warm + + + +volumes + The volume(s) where the data set is created. Use a string to define a singular volume or a list of strings for multiple volumes. + + + | **required**: False + | **type**: raw + + + + +Examples +-------- + +.. code-block:: yaml+jinja + + + - name: Initialize an auxiliary temporary storage data set by using the templated location + ibm.ibm_zos_cics.aux_temp_storage: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: "initial" + + - name: Initialize a user specified auxiliary temporary storage data set + ibm.ibm_zos_cics.aux_temp_storage: + region_data_sets: + dfhtemp: + dsn: "REGIONS.ABCD0001.DFHTEMP" + state: "initial" + + - name: Initialize a large auxiliary temporary storage data set by using the templated location + ibm.ibm_zos_cics.aux_temp_storage: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + space_primary: 50 + space_type: "M" + state: "initial" + + - name: Retain the existing state of an auxiliary temporary storage data set defined by the template + ibm.ibm_zos_cics.aux_temp_storage: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: "warm" + + - name: Retain the existing state of a user specified auxiliary temporary storage data set + ibm.ibm_zos_cics.aux_temp_storage: + region_data_sets: + dfhtemp: + dsn: "REGIONS.ABCD0001.DFHTEMP" + state: "warm" + + - name: Delete an existing auxiliary temporary storage data set defined by the template + ibm.ibm_zos_cics.aux_temp_storage: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: "absent" + + - name: Delete an existing user specified 
auxiliary temporary storage data set + ibm.ibm_zos_cics.aux_temp_storage: + region_data_sets: + dfhtemp: + dsn: "REGIONS.ABCD0001.DFHTEMP" + state: "absent" + + + + + + + + + +Return Values +------------- + + + + + changed + | True if the state was changed, otherwise False. + + | **returned**: always + | **type**: bool + + + + failed + | True if the Ansible task failed, otherwise False. + + | **returned**: always + | **type**: bool + + + + start_state + | The state of the auxiliary temporary storage data set before the Ansible task runs. + + | **returned**: always + | **type**: dict + + + + data_set_organization + | The organization of the data set at the start of the Ansible task. + + | **returned**: always + | **type**: str + | **sample**: VSAM + + + + + + exists + | True if the specified auxiliary temporary storage data set exists. + + | **returned**: always + | **type**: bool + + + + + + end_state + | The state of the auxiliary temporary storage data set at the end of the Ansible task. + + | **returned**: always + | **type**: dict + + + + data_set_organization + | The organization of the data set at the end of the Ansible task. + + | **returned**: always + | **type**: str + | **sample**: VSAM + + + + + + exists + | True if the specified auxiliary temporary storage data set exists. + + | **returned**: always + | **type**: bool + + + + + + executions + | A list of program executions performed during the Ansible task. + + | **returned**: always + | **type**: list + + + + name + | A human-readable name for the program execution. + + | **returned**: always + | **type**: str + + + + rc + | The return code for the program execution. + + | **returned**: always + | **type**: int + + + + stdout + | The standard output stream returned from the program execution. + + | **returned**: always + | **type**: str + + + + stderr + | The standard error stream returned from the program execution. 
+ + | **returned**: always + | **type**: str + + + + + + msg + | A string containing an error message if applicable + + | **returned**: always + | **type**: str + + diff --git a/docs/source/modules/aux_trace.rst b/docs/source/modules/aux_trace.rst new file mode 100644 index 00000000..1f4a4084 --- /dev/null +++ b/docs/source/modules/aux_trace.rst @@ -0,0 +1,415 @@ +.. ............................................................................... +.. © Copyright IBM Corporation 2020,2023 . +.. Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) . +.. ............................................................................... + +:github_url: https://github.com/ansible-collections/ibm_zos_cics/blob/main/plugins/modules/aux_trace.py + +.. _aux_trace_module: + + +aux_trace -- Allocate auxiliary trace data sets +=============================================== + + + +.. contents:: + :local: + :depth: 1 + + +Synopsis +-------- +- Allocates the two \ `auxiliary trace `__\ data sets used by a CICS® region. When CICS auxiliary trace is activated, trace entries produced by CICS are written to the auxiliary trace data sets. These data sets can hold large amounts of trace data. +- The two data sets are referred to as auxiliary trace data set A (DFHAUXT) and auxiliary trace data set B (DFHBUXT). + + + + + +Parameters +---------- + + + +destination + Identify which one of the auxiliary trace data sets is the target of the operation. If the value is left blank, A is implied, but you can specify A or B. + + Specify \ :literal:`A`\ to create or delete the A data set. + + Specify \ :literal:`B`\ to create or delete the B data set. This MUST be set for the creation of the B data set. + + + | **required**: False + | **type**: str + | **default**: A + | **choices**: A, B + + + +region_data_sets + The location of the region data sets to be created by using a template, for example, \ :literal:`REGIONS.ABCD0001.\<\< data\_set\_name \>\>`\ . 
+ + If you want to use a data set that already exists, ensure that the data set is an auxiliary trace data set. + + + | **required**: True + | **type**: dict + + + + dfhauxt + Overrides the templated location for the DFHAUXT data set. + + + | **required**: False + | **type**: dict + + + + dsn + The data set name of DFHAUXT to override the template. + + + | **required**: False + | **type**: str + + + + + dfhbuxt + Overrides the templated location for the DFHBUXT data set. + + + | **required**: False + | **type**: dict + + + + dsn + The data set name of DFHBUXT to override the template. + + + | **required**: False + | **type**: str + + + + + template + The base location of the region data sets with a template. + + + | **required**: False + | **type**: str + + + + +space_primary + The size of the primary space allocated to the auxiliary trace data set. Note that this is just the value; the unit is specified with \ :literal:`space\_type`\ . + + This option takes effect only when the auxiliary trace data set is being created. If the data set already exists, the option has no effect. + + + | **required**: False + | **type**: int + | **default**: 20 + + + +space_secondary + The size of the secondary space allocated to the auxiliary trace data set. Note that this is just the value; the unit is specified with \ :literal:`space\_type`\ . + + This option takes effect only when the auxiliary trace data set is being created. If the data set already exists, the option has no effect. + + + | **required**: False + | **type**: int + | **default**: 4 + + + +space_type + The unit portion of the auxiliary trace data set size. Note that this is just the unit; the value for the primary space is specified with \ :literal:`space\_primary`\ and the value for the secondary space is specified with \ :literal:`space\_secondary`\ . + + This option takes effect only when the auxiliary trace data set is being created. If the data set already exists, the option has no effect. 
+ + The size can be specified in megabytes (\ :literal:`M`\ ), kilobytes (\ :literal:`K`\ ), cylinders (\ :literal:`CYL`\ ), or tracks (\ :literal:`TRK`\ ). + + + | **required**: False + | **type**: str + | **default**: M + | **choices**: M, K, CYL, TRK + + + +state + The intended state for the auxiliary trace data set, which the module aims to achieve. + + Specify \ :literal:`absent`\ to remove the auxiliary trace data set entirely, if it exists. + + Specify \ :literal:`initial`\ to create the auxiliary trace data set if it does not exist. If the specified data set exists but is empty, the module leaves the data set as is. If the specified data set exists and has contents, the module deletes the data set and then creates a new, empty one. + + Specify \ :literal:`warm`\ to retain an existing auxiliary trace data set in its current state. The module checks whether the specified data set exists, and if it does, leaves the data set as is. If the data set does not exist, the operation fails. + + + | **required**: True + | **type**: str + | **choices**: initial, absent, warm + + + +volumes + The volume(s) where the data set is created. Use a string to define a singular volume or a list of strings for multiple volumes. + + + | **required**: False + | **type**: raw + + + + +Examples +-------- + +..
code-block:: yaml+jinja + + + - name: Allocate auxiliary trace data set A (implicit) by using the templated location + ibm.ibm_zos_cics.aux_trace: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: initial + + - name: Allocate a user specified data set as auxiliary trace data set A (implicit) + ibm.ibm_zos_cics.aux_trace: + region_data_sets: + dfhauxt: + dsn: "REGIONS.ABCD0001.DFHAUXT" + state: initial + + - name: Allocate auxiliary trace data set A by using the templated location + ibm.ibm_zos_cics.aux_trace: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: initial + destination: A + + - name: Allocate a user specified data set as auxiliary trace data set A + ibm.ibm_zos_cics.aux_trace: + region_data_sets: + dfhauxt: + dsn: "REGIONS.ABCD0001.DFHAUXT" + state: initial + destination: A + + - name: Allocate auxiliary trace data set B by using the templated location + ibm.ibm_zos_cics.aux_trace: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: initial + destination: B + + - name: Allocate a user specified data set as auxiliary trace data set B + ibm.ibm_zos_cics.aux_trace: + region_data_sets: + dfhbuxt: + dsn: "REGIONS.ABCD0001.DFHBUXT" + state: initial + destination: B + + - name: Retain the existing state of auxiliary trace data set A (implicit) defined by the template + ibm.ibm_zos_cics.aux_trace: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: "warm" + + - name: Retain the existing state of a user specified auxiliary trace data set A (implicit) + ibm.ibm_zos_cics.aux_trace: + region_data_sets: + dfhauxt: + dsn: "REGIONS.ABCD0001.DFHAUXT" + state: "warm" + + - name: Retain the existing state of auxiliary trace data set B defined by the template + ibm.ibm_zos_cics.aux_trace: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: "warm" + destination: B + + - name: Retain the existing state of a user specified auxiliary 
trace data set B + ibm.ibm_zos_cics.aux_trace: + region_data_sets: + dfhbuxt: + dsn: "REGIONS.ABCD0001.DFHBUXT" + state: "warm" + destination: B + + - name: Delete auxiliary trace data set A (implicit) defined by the template + ibm.ibm_zos_cics.aux_trace: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: absent + + - name: Delete a user specified auxiliary trace data set A (implicit) + ibm.ibm_zos_cics.aux_trace: + region_data_sets: + dfhauxt: + dsn: "REGIONS.ABCD0001.DFHAUXT" + state: absent + + - name: Delete auxiliary trace data set B defined by the template + ibm.ibm_zos_cics.aux_trace: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: absent + destination: B + + - name: Delete a user specified auxiliary trace data set B + ibm.ibm_zos_cics.aux_trace: + region_data_sets: + dfhbuxt: + dsn: "REGIONS.ABCD0001.DFHBUXT" + state: absent + destination: B + + + + + + + + + +Return Values +------------- + + + + + changed + | True if the state was changed, otherwise False. + + | **returned**: always + | **type**: bool + + + + failed + | True if the Ansible task failed, otherwise False. + + | **returned**: always + | **type**: bool + + + + start_state + | The state of the auxiliary trace data set before the Ansible task runs. + + | **returned**: always + | **type**: dict + + + + data_set_organization + | The organization of the data set at the start of the Ansible task. + + | **returned**: always + | **type**: str + | **sample**: Sequential + + + + + + exists + | True if the specified auxiliary trace data set exists. + + | **returned**: always + | **type**: bool + + + + + + end_state + | The state of the auxiliary trace data set at the end of the Ansible task. + + | **returned**: always + | **type**: dict + + + + data_set_organization + | The organization of the data set at the end of the Ansible task.
+ + | **returned**: always + | **type**: str + | **sample**: Sequential + + + + + + exists + | True if the specified auxiliary trace data set exists. + + | **returned**: always + | **type**: bool + + + + + + executions + | A list of program executions performed during the Ansible task. + + | **returned**: always + | **type**: list + + + + name + | A human-readable name for the program execution. + + | **returned**: always + | **type**: str + + + + rc + | The return code for the program execution. + + | **returned**: always + | **type**: int + + + + stdout + | The standard output stream returned from the program execution. + + | **returned**: always + | **type**: str + + + + stderr + | The standard error stream returned from the program execution. + + | **returned**: always + | **type**: str + + + + + + msg + | A string containing an error message if applicable + + | **returned**: always + | **type**: str + + diff --git a/docs/source/modules/cmci_action.rst b/docs/source/modules/cmci_action.rst index ef762942..877481ae 100644 --- a/docs/source/modules/cmci_action.rst +++ b/docs/source/modules/cmci_action.rst @@ -1,9 +1,9 @@ .. ............................................................................... -.. © Copyright IBM Corporation 2020 . +.. © Copyright IBM Corporation 2020,2023 . .. Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) . .. ............................................................................... -:github_url: https://github.com/ansible-collections/ibm_zos_cics/blob/dev/plugins/modules/cmci_action.py +:github_url: https://github.com/ansible-collections/ibm_zos_cics/blob/main/plugins/modules/cmci_action.py .. _cmci_action_module: @@ -20,7 +20,7 @@ cmci_action -- Perform actions on CICS and CICSPlex SM resources Synopsis -------- -- Perform actions on CICS® or CICSPlex® SM definitions and resources, by initiating PUT requests via the CMCI REST API. 
The CMCI REST API can be configured in CICSPlex SM or stand-alone regions (SMSS). For information about the API, see `CMCI REST API `_. For information about how to compose PUT requests, see `CMCI PUT requests `_. +- Perform actions on CICS® or CICSPlex® SM definitions and resources, by initiating PUT requests via the CMCI REST API. The CMCI REST API can be configured in CICSPlex SM or stand-alone regions (SMSS). For information about the API, see \ `CMCI REST API `__\ . For information about how to compose PUT requests, see \ `CMCI PUT requests `__\ . @@ -32,7 +32,7 @@ Parameters action_name - The name of the target action. To find the name of the appropriate action, consult the CICSPlex SM resource tables for the target resource type. For example, the `PROGRAM resource table reference `_ lists the eligible actions for CICS programs. + The name of the target action. To find the name of the appropriate action, consult the CICSPlex SM resource tables for the target resource type. For example, the \ `PROGRAM resource table reference `__\ lists the eligible actions for CICS programs. @@ -42,7 +42,7 @@ action_name action_parameters - A list of one or more parameters that control the *action* operation. Eligible actions and corresponding parameters for the target operation can be found in the resource table reference for the target resource type, as listed in the PERFORM SET operation section of the "Valid CPSM operations" table. For example, the valid parameters for a PROGDEF CSDCOPY action are ``AS_RESOURCE``, ``DUPACTION`` and ``TO_CSDGROUP``, as found in the `PROGDEF resource table reference `_. + A list of one or more parameters that control the \ :emphasis:`action`\ operation. Eligible actions and corresponding parameters for the target operation can be found in the resource table reference for the target resource type, as listed in the PERFORM SET operation section of the "Valid CPSM operations" table. 
For example, the valid parameters for a PROGDEF CSDCOPY action are \ :literal:`AS\_RESOURCE`\ , \ :literal:`DUPACTION`\ and \ :literal:`TO\_CSDGROUP`\ , as found in the \ `PROGDEF resource table reference `__\ . @@ -74,11 +74,11 @@ action_parameters cmci_cert Location of the PEM-formatted certificate chain file to be used for HTTPS client authentication. - Can also be specified using the environment variable CMCI_CERT. + Can also be specified using the environment variable CMCI\_CERT. - Required if *cmci_key* is specified. + Required if \ :emphasis:`cmci\_key`\ is specified. - Authentication prioritises certificate authentication if *cmci_cert* and *cmci_key* are provided, then basic authentication if *cmci_user* and (cmci_password) are provided, and then unauthenticated if none is provided. + Authentication prioritises certificate authentication if \ :emphasis:`cmci\_cert`\ and \ :emphasis:`cmci\_key`\ are provided, then basic authentication if \ :emphasis:`cmci\_user`\ and \ :emphasis:`cmci\_password`\ are provided, and then unauthenticated if none is provided. | **required**: False @@ -98,11 +98,11 @@ cmci_host cmci_key Location of the PEM-formatted file storing your private key to be used for HTTPS client authentication. - Can also be specified using the environment variable CMCI_KEY. + Can also be specified using the environment variable CMCI\_KEY. - Required if *cmci_cert* is specified. + Required if \ :emphasis:`cmci\_cert`\ is specified. - Authentication prioritises certificate authentication if *cmci_cert* and *cmci_key* are provided, then basic authentication if *cmci_user* and (cmci_password) are provided, and then unauthenticated if none is provided. + Authentication prioritises certificate authentication if \ :emphasis:`cmci\_cert`\ and \ :emphasis:`cmci\_key`\ are provided, then basic authentication if \ :emphasis:`cmci\_user`\ and \ :emphasis:`cmci\_password`\ are provided, and then unauthenticated if none is provided. 
| **required**: False @@ -111,13 +111,13 @@ cmci_key cmci_password - The password of *cmci_user* to pass HTTP basic authentication. + The password of \ :emphasis:`cmci\_user`\ to pass HTTP basic authentication. - Can also be specified using the environment variable CMCI_PASSWORD. + Can also be specified using the environment variable CMCI\_PASSWORD. - Required if *cmci_user* is specified. + Required if \ :emphasis:`cmci\_user`\ is specified. - Authentication prioritises certificate authentication if *cmci_cert* and *cmci_key* are provided, then basic authentication if *cmci_user* and (cmci_password) are provided, and then unauthenticated if none is provided. + Authentication prioritises certificate authentication if \ :emphasis:`cmci\_cert`\ and \ :emphasis:`cmci\_key`\ are provided, then basic authentication if \ :emphasis:`cmci\_user`\ and \ :emphasis:`cmci\_password`\ are provided, and then unauthenticated if none is provided. | **required**: false @@ -137,11 +137,11 @@ cmci_port cmci_user The user ID under which the CMCI request will run. - Can also be specified using the environment variable CMCI_USER. + Can also be specified using the environment variable CMCI\_USER. - Required if *cmci_password* is specified. + Required if \ :emphasis:`cmci\_password`\ is specified. - Authentication prioritises certificate authentication if *cmci_cert* and *cmci_key* are provided, then basic authentication if *cmci_user* and (cmci_password) are provided, and then unauthenticated if none is provided. + Authentication prioritises certificate authentication if \ :emphasis:`cmci\_cert`\ and \ :emphasis:`cmci\_key`\ are provided, then basic authentication if \ :emphasis:`cmci\_user`\ and \ :emphasis:`cmci\_password`\ are provided, and then unauthenticated if none is provided. 
| **required**: false @@ -150,11 +150,11 @@ cmci_user context - If CMCI is installed in a CICSPlex® SM environment, *context* is the name of the CICSplex or CMAS associated with the request, for example, ``PLEX1``. To determine whether a CMAS can be specified as *context*, see the **CMAS context** entry in the CICSPlex SM resource table reference of a resource. For example, according to the `PROGRAM resource table `_, CMAS context is not supported for PROGRAM. + If CMCI is installed in a CICSPlex® SM environment, \ :emphasis:`context`\ is the name of the CICSplex or CMAS associated with the request, for example, \ :literal:`PLEX1`\ . To determine whether a CMAS can be specified as \ :emphasis:`context`\ , see the \ :strong:`CMAS context`\ entry in the CICSPlex SM resource table reference of a resource. For example, according to the \ `PROGRAM resource table `__\ , CMAS context is not supported for PROGRAM. - If CMCI is installed in a single region (SMSS), *context* is the APPLID of the CICS region associate with the request. + If CMCI is installed in a single region (SMSS), \ :emphasis:`context`\ is the APPLID of the CICS region associated with the request. - The value of *context* must contain no spaces. *context* is not case-sensitive. + The value of \ :emphasis:`context`\ must contain no spaces. \ :emphasis:`context`\ is not case-sensitive. | **required**: True @@ -163,7 +163,7 @@ context insecure - When set to ``true``, disables SSL certificate trust chain verification when using HTTPS. + When set to \ :literal:`true`\ , disables SSL certificate trust chain verification when using HTTPS. | **required**: False @@ -181,17 +181,17 @@ resources complex_filter - A dictionary representing a complex filter expression. Complex filters are composed of filter expressions, represented as dictionaries.
Each dictionary can specify either an attribute expression, a list of filter expressions to be composed with the ``and`` operator, or a list of filter expressions to be composed with the ``or`` operator. + A dictionary representing a complex filter expression. Complex filters are composed of filter expressions, represented as dictionaries. Each dictionary can specify either an attribute expression, a list of filter expressions to be composed with the \ :literal:`and`\ operator, or a list of filter expressions to be composed with the \ :literal:`or`\ operator. - The ``attribute``, ``and`` and ``or`` options are mutually exclusive with each other. + The \ :literal:`attribute`\ , \ :literal:`and`\ and \ :literal:`or`\ options are mutually exclusive with each other. - Can contain one or more filters. Multiple filters must be combined using ``and`` or ``or`` logical operators. + Can contain one or more filters. Multiple filters must be combined using \ :literal:`and`\ or \ :literal:`or`\ logical operators. Filters can be nested. - When supplying the ``attribute`` option, you must also supply a ``value`` for the filter. You can also override the default operator of ``=`` with the ``operator`` option. + When supplying the \ :literal:`attribute`\ option, you must also supply a \ :literal:`value`\ for the filter. You can also override the default operator of \ :literal:`=`\ with the \ :literal:`operator`\ option. - For examples, see "Examples" in :ref:`cmci_get `. + For examples, see "Examples" in \ :ref:`ibm.ibm\_zos\_cics.cmci\_get `\ . | **required**: False @@ -200,9 +200,9 @@ resources and - A list of filter expressions to be combined with an ``and`` operation. + A list of filter expressions to be combined with an \ :literal:`and`\ operation. - Filter expressions are nested ``complex_filter`` elements. Each nested filter expression can be either an ``attribute``, ``and`` or ``or`` complex filter expression. 
+ Filter expressions are nested \ :literal:`complex\_filter`\ elements. Each nested filter expression can be either an \ :literal:`attribute`\ , \ :literal:`and`\ or \ :literal:`or`\ complex filter expression. | **required**: False @@ -213,7 +213,7 @@ resources attribute The name of a resource table attribute on which to filter. - For supported attributes of different resource types, see their resource table reference, for example, `PROGDEF resource table reference `_. + For supported attributes of different resource types, see their resource table reference, for example, \ `PROGDEF resource table reference `__\ . | **required**: False @@ -222,7 +222,7 @@ resources operator - These operators are accepted: ``<`` or ``LT`` (less than), ``<=`` or ``LE`` (less than or equal to), ``=`` or ``EQ`` (equal to), ``>`` or ``GT`` (greater than), ``>=`` or ``GE`` (greater than or equal to), ``==`` or ``IS`` (is), ``¬=``, ``!=``, or ``NE`` (not equal to). If not supplied when ``attribute`` is used, ``EQ`` is assumed. + These operators are accepted: \ :literal:`\<`\ or \ :literal:`LT`\ (less than), \ :literal:`\<=`\ or \ :literal:`LE`\ (less than or equal to), \ :literal:`=`\ or \ :literal:`EQ`\ (equal to), \ :literal:`\>`\ or \ :literal:`GT`\ (greater than), \ :literal:`\>=`\ or \ :literal:`GE`\ (greater than or equal to), \ :literal:`==`\ or \ :literal:`IS`\ (is), \ :literal:`¬=`\ , \ :literal:`!=`\ , or \ :literal:`NE`\ (not equal to). If not supplied when \ :literal:`attribute`\ is used, \ :literal:`EQ`\ is assumed. @@ -233,9 +233,9 @@ resources or - A list of filter expressions to be combined with an ``or`` operation. + A list of filter expressions to be combined with an \ :literal:`or`\ operation. - Filter expressions are nested ``complex_filter`` elements. Each nested filter expression can be either an ``attribute``, ``and`` or ``or`` complex filter expression. + Filter expressions are nested \ :literal:`complex\_filter`\ elements. 
Each nested filter expression can be either an \ :literal:`attribute`\ , \ :literal:`and`\ or \ :literal:`or`\ complex filter expression. | **required**: False @@ -246,7 +246,7 @@ resources value The value by which you are to filter the resource attributes. - The value must be a valid one for the resource table attribute as documented in the resource table reference, for example, `PROGDEF resource table reference `_. + The value must be a valid one for the resource table attribute as documented in the resource table reference, for example, \ `PROGDEF resource table reference `__\ . | **required**: False @@ -258,21 +258,21 @@ resources filter A dictionary with attribute names as keys, and target values, to be used as criteria to filter the set of resources returned from CICSPlex SM. - Filters implicitly use the ``=`` operator. + Filters implicitly use the \ :literal:`=`\ operator. - Filters for ``string`` type attributes can use the ``*`` and ``+`` wildcard operators. + Filters for \ :literal:`string`\ type attributes can use the \ :literal:`\*`\ and \ :literal:`+`\ wildcard operators. - ``*`` is a wildcard representing an unknown number of characters, and must appear at the end of the value. + \ :literal:`\*`\ is a wildcard representing an unknown number of characters, and must appear at the end of the value. - ``+`` is a wildcard representing a single character, and can appear in any place in the value, potentially multiple times. + \ :literal:`+`\ is a wildcard representing a single character, and can appear in any place in the value, potentially multiple times. - To use more complicated filter expressions, including a range of different filter operators, and the ability to compose filters with ``and`` and ``or`` operators, see the ``complex_filter`` parameter. 
+ To use more complicated filter expressions, including a range of different filter operators, and the ability to compose filters with \ :literal:`and`\ and \ :literal:`or`\ operators, see the \ :literal:`complex\_filter`\ parameter. - For more details, see `How to build a filter expression `_. + For more details, see \ `How to build a filter expression `__\ . - For examples, see :ref:`cmci_get ` + For examples, see \ :ref:`ibm.ibm\_zos\_cics.cmci\_get `\ . - For supported attributes of different resource types, see their resource table reference, for example, `PROGDEF resource table reference `_. + For supported attributes of different resource types, see their resource table reference, for example, \ `PROGDEF resource table reference `__\ . | **required**: False @@ -281,7 +281,7 @@ resources get_parameters - A list of one or more parameters with optional values used to identify the resources for this request. Eligible parameters for identifying the target resources can be found in the resource table reference for the target resource type, as valid parameters for the GET operation in the "Valid CPSM operations" table. For example, the valid parameters for identifying a PROGDEF resource are CICSSYS, CSDGROUP and RESGROUP, as found in the `PROGDEF resource table reference `_. + A list of one or more parameters with optional values used to identify the resources for this request. Eligible parameters for identifying the target resources can be found in the resource table reference for the target resource type, as valid parameters for the GET operation in the "Valid CPSM operations" table. For example, the valid parameters for identifying a PROGDEF resource are CICSSYS, CSDGROUP and RESGROUP, as found in the \ `PROGDEF resource table reference `__\ . @@ -324,11 +324,11 @@ scheme scope Specifies the name of a CICSplex, CICS region group, CICS region, or logical scope that is associated with the query. 
- *scope* is a subset of *context* and limits the request to particular CICS systems or resources. + \ :emphasis:`scope`\ is a subset of \ :emphasis:`context`\ and limits the request to particular CICS systems or resources. - *scope* is optional. If it's not specified, the request is limited by the value of *context* alone. + \ :emphasis:`scope`\ is optional. If it's not specified, the request is limited by the value of \ :emphasis:`context`\ alone. - The value of *scope* must contain no spaces. *scope* is not case-sensitive. + The value of \ :emphasis:`scope`\ must contain no spaces. \ :emphasis:`scope`\ is not case-sensitive. | **required**: false @@ -336,8 +336,18 @@ scope +timeout + HTTP request timeout in seconds + + + | **required**: False + | **type**: int + | **default**: 30 + + + type - The CMCI external resource name that maps to the target CICS or CICSPlex SM resource type. For a list of CMCI external resource names, see `CMCI resource names `_. + The CMCI external resource name that maps to the target CICS or CICSPlex SM resource type. For a list of CMCI external resource names, see \ `CMCI resource names `__\ . 
| **required**: True @@ -354,31 +364,31 @@ Examples - name: Newcopy a program cmci_action: - cmci_host: 'winmvs2c.hursley.ibm.com' + cmci_host: "winmvs2c.hursley.ibm.com" cmci_port: 10080 - context: 'iyk3z0r9' - type: 'CICSProgram' + context: "iyk3z0r9" + type: "CICSProgram" action_name: NEWCOPY - resources: + resources: filter: - name: 'PONGALT' + name: "PONGALT" get_parameters: - - name: 'csdgroup' - value: 'JVMGRP' + - name: "csdgroup" + value: "JVMGRP" - name: install a bundle in a CICS region cmci_action: - cmci_host: 'winmvs2c.hursley.ibm.com' - cmci_port: '10080' - context: 'iyk3z0r9' + cmci_host: "winmvs2c.hursley.ibm.com" + cmci_port: "10080" + context: "iyk3z0r9" type: CICSBundle action_name: install - resources: + resources: filter: - name: 'PONGALT' + name: "PONGALT" action_parameters: - - name: 'usage' - value: 'local' + - name: "usage" + value: "local" @@ -419,7 +429,7 @@ Return Values cpsm_reason - | The character value of the REASON code returned by each CICSPlex SM API command. For a list of REASON character values, see https://www.ibm.com/support/knowledgecenter/SSGMCP_5.6.0/reference-system-programming/commands-cpsm/eyup2ky.html. + | The character value of the REASON code returned by each CICSPlex SM API command. For a list of REASON character values, see https://www.ibm.com/docs/en/cics-ts/latest?topic=values-eyuda-reason-in-alphabetical-order. | **returned**: success | **type**: str @@ -427,7 +437,7 @@ Return Values cpsm_reason_code - | The numeric value of the REASON code returned by each CICSPlex SM API command. For a list of REASON numeric values, see https://www.ibm.com/support/knowledgecenter/SSGMCP_5.6.0/reference-system-programming/commands-cpsm/eyup2kw.html. + | The numeric value of the REASON code returned by each CICSPlex SM API command. For a list of REASON numeric values, see https://www.ibm.com/docs/en/cics-ts/latest?topic=values-eyuda-reason-in-numerical-order.
| **returned**: success | **type**: int @@ -435,7 +445,7 @@ Return Values cpsm_response - | The character value of the RESPONSE code returned by each CICSPlex SM API command. For a list of RESPONSE character values, see https://www.ibm.com/support/knowledgecenter/SSGMCP_5.6.0/reference-system-programming/commands-cpsm/eyup2kx.html. + | The character value of the RESPONSE code returned by each CICSPlex SM API command. For a list of RESPONSE character values, see https://www.ibm.com/docs/en/cics-ts/latest?topic=values-eyuda-response-in-alphabetical-order. | **returned**: success | **type**: str @@ -443,7 +453,7 @@ Return Values cpsm_response_code - | The numeric value of the RESPONSE code returned by each CICSPlex SM API command. For a list of RESPONSE numeric values, see https://www.ibm.com/support/knowledgecenter/SSGMCP_5.6.0/reference-system-programming/commands-cpsm/eyup2kv.html. + | The numeric value of the RESPONSE code returned by each CICSPlex SM API command. For a list of RESPONSE numeric values, see https://www.ibm.com/docs/en/cics-ts/latest?topic=values-eyuda-response-in-numerical-order. | **returned**: success | **type**: str diff --git a/docs/source/modules/cmci_create.rst b/docs/source/modules/cmci_create.rst index 048f2d37..6c1d4d9c 100644 --- a/docs/source/modules/cmci_create.rst +++ b/docs/source/modules/cmci_create.rst @@ -1,9 +1,9 @@ .. ............................................................................... -.. © Copyright IBM Corporation 2020 . +.. © Copyright IBM Corporation 2020,2023 . .. Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) . .. ............................................................................... -:github_url: https://github.com/ansible-collections/ibm_zos_cics/blob/dev/plugins/modules/cmci_create.py +:github_url: https://github.com/ansible-collections/ibm_zos_cics/blob/main/plugins/modules/cmci_create.py .. 
_cmci_create_module: @@ -20,7 +20,7 @@ cmci_create -- Create CICS and CICSPlex SM definitions Synopsis -------- -- Create definitional CICS® or CICSPlex® SM resources in CSD and BAS repositories, by initiating POST requests via the CMCI REST API. The CMCI REST API can be configured in CICSPlex SM or stand-alone regions (SMSS). For information about the API, see `CMCI REST API `_. For information about how to compose POST requests, see `CMCI POST requests `_. +- Create definitional CICS® or CICSPlex® SM resources in CSD and BAS repositories, by initiating POST requests via the CMCI REST API. The CMCI REST API can be configured in CICSPlex SM or stand-alone regions (SMSS). For information about the API, see \ `CMCI REST API `__\ . For information about how to compose POST requests, see \ `CMCI POST requests `__\ . @@ -32,7 +32,7 @@ Parameters attributes - The resource attributes to be created or updated. Available attributes can be found in the CICSPlex® SM resource table reference for the target resource type, for example, `PROGDEF resource table reference `_. + The resource attributes to be created or updated. Available attributes can be found in the CICSPlex® SM resource table reference for the target resource type, for example, \ `PROGDEF resource table reference `__\ . | **required**: False @@ -43,11 +43,11 @@ attributes cmci_cert Location of the PEM-formatted certificate chain file to be used for HTTPS client authentication. - Can also be specified using the environment variable CMCI_CERT. + Can also be specified using the environment variable CMCI\_CERT. - Required if *cmci_key* is specified. + Required if \ :emphasis:`cmci\_key`\ is specified. - Authentication prioritises certificate authentication if *cmci_cert* and *cmci_key* are provided, then basic authentication if *cmci_user* and (cmci_password) are provided, and then unauthenticated if none is provided. 
+ Authentication prioritises certificate authentication if \ :emphasis:`cmci\_cert`\ and \ :emphasis:`cmci\_key`\ are provided, then basic authentication if \ :emphasis:`cmci\_user`\ and \ :emphasis:`cmci\_password`\ are provided, and then unauthenticated if none is provided. | **required**: False @@ -67,11 +67,11 @@ cmci_host cmci_key Location of the PEM-formatted file storing your private key to be used for HTTPS client authentication. - Can also be specified using the environment variable CMCI_KEY. + Can also be specified using the environment variable CMCI\_KEY. - Required if *cmci_cert* is specified. + Required if \ :emphasis:`cmci\_cert`\ is specified. - Authentication prioritises certificate authentication if *cmci_cert* and *cmci_key* are provided, then basic authentication if *cmci_user* and (cmci_password) are provided, and then unauthenticated if none is provided. + Authentication prioritises certificate authentication if \ :emphasis:`cmci\_cert`\ and \ :emphasis:`cmci\_key`\ are provided, then basic authentication if \ :emphasis:`cmci\_user`\ and \ :emphasis:`cmci\_password`\ are provided, and then unauthenticated if none is provided. | **required**: False @@ -80,13 +80,13 @@ cmci_key cmci_password - The password of *cmci_user* to pass HTTP basic authentication. + The password of \ :emphasis:`cmci\_user`\ to pass HTTP basic authentication. - Can also be specified using the environment variable CMCI_PASSWORD. + Can also be specified using the environment variable CMCI\_PASSWORD. - Required if *cmci_user* is specified. + Required if \ :emphasis:`cmci\_user`\ is specified. - Authentication prioritises certificate authentication if *cmci_cert* and *cmci_key* are provided, then basic authentication if *cmci_user* and (cmci_password) are provided, and then unauthenticated if none is provided. 
+ Authentication prioritises certificate authentication if \ :emphasis:`cmci\_cert`\ and \ :emphasis:`cmci\_key`\ are provided, then basic authentication if \ :emphasis:`cmci\_user`\ and \ :emphasis:`cmci\_password`\ are provided, and then unauthenticated if none is provided. | **required**: false @@ -106,11 +106,11 @@ cmci_port cmci_user The user ID under which the CMCI request will run. - Can also be specified using the environment variable CMCI_USER. + Can also be specified using the environment variable CMCI\_USER. - Required if *cmci_password* is specified. + Required if \ :emphasis:`cmci\_password`\ is specified. - Authentication prioritises certificate authentication if *cmci_cert* and *cmci_key* are provided, then basic authentication if *cmci_user* and (cmci_password) are provided, and then unauthenticated if none is provided. + Authentication prioritises certificate authentication if \ :emphasis:`cmci\_cert`\ and \ :emphasis:`cmci\_key`\ are provided, then basic authentication if \ :emphasis:`cmci\_user`\ and \ :emphasis:`cmci\_password`\ are provided, and then unauthenticated if none is provided. | **required**: false @@ -119,11 +119,11 @@ cmci_user context - If CMCI is installed in a CICSPlex® SM environment, *context* is the name of the CICSplex or CMAS associated with the request, for example, ``PLEX1``. To determine whether a CMAS can be specified as *context*, see the **CMAS context** entry in the CICSPlex SM resource table reference of a resource. For example, according to the `PROGRAM resource table `_, CMAS context is not supported for PROGRAM. + If CMCI is installed in a CICSPlex® SM environment, \ :emphasis:`context`\ is the name of the CICSplex or CMAS associated with the request, for example, \ :literal:`PLEX1`\ . To determine whether a CMAS can be specified as \ :emphasis:`context`\ , see the \ :strong:`CMAS context`\ entry in the CICSPlex SM resource table reference of a resource. 
For example, according to the \ `PROGRAM resource table `__\ , CMAS context is not supported for PROGRAM. - If CMCI is installed in a single region (SMSS), *context* is the APPLID of the CICS region associate with the request. + If CMCI is installed in a single region (SMSS), \ :emphasis:`context`\ is the APPLID of the CICS region associated with the request. - The value of *context* must contain no spaces. *context* is not case-sensitive. + The value of \ :emphasis:`context`\ must contain no spaces. \ :emphasis:`context`\ is not case-sensitive. | **required**: True @@ -132,7 +132,7 @@ context create_parameters - A list of one or more parameters that control the *create* operation. Eligible parameters for the CREATE operation can be found in the resource table reference for the target resource type, as listed in the CREATE operation section of the "Valid CPSM operations" table. For example, the valid parameters for a PROGDEF CREATE operation are CSD and RESGROUP, as found in the `PROGDEF resource table reference `_. + A list of one or more parameters that control the \ :emphasis:`create`\ operation. Eligible parameters for the CREATE operation can be found in the resource table reference for the target resource type, as listed in the CREATE operation section of the "Valid CPSM operations" table. For example, the valid parameters for a PROGDEF CREATE operation are CSD and RESGROUP, as found in the \ `PROGDEF resource table reference `__\ . @@ -161,7 +161,7 @@ create_parameters insecure - When set to ``true``, disables SSL certificate trust chain verification when using HTTPS. + When set to \ :literal:`true`\ , disables SSL certificate trust chain verification when using HTTPS. | **required**: False @@ -183,11 +183,11 @@ scheme scope Specifies the name of a CICSplex, CICS region group, CICS region, or logical scope that is associated with the query. - *scope* is a subset of *context* and limits the request to particular CICS systems or resources.
+ \ :emphasis:`scope`\ is a subset of \ :emphasis:`context`\ and limits the request to particular CICS systems or resources. - *scope* is optional. If it's not specified, the request is limited by the value of *context* alone. + \ :emphasis:`scope`\ is optional. If it's not specified, the request is limited by the value of \ :emphasis:`context`\ alone. - The value of *scope* must contain no spaces. *scope* is not case-sensitive. + The value of \ :emphasis:`scope`\ must contain no spaces. \ :emphasis:`scope`\ is not case-sensitive. | **required**: false @@ -195,8 +195,18 @@ scope +timeout + HTTP request timeout in seconds + + + | **required**: False + | **type**: int + | **default**: 30 + + + type - The CMCI external resource name that maps to the target CICS or CICSPlex SM resource type. For a list of CMCI external resource names, see `CMCI resource names `_. + The CMCI external resource name that maps to the target CICS or CICSPlex SM resource type. For a list of CMCI external resource names, see \ `CMCI resource names `__\ . | **required**: True @@ -213,16 +223,16 @@ Examples - name: define a BUNDLE in a CSD cmci_create: - cmci_host: 'winmvs2c.hursley.ibm.com' + cmci_host: "winmvs2c.hursley.ibm.com" cmci_port: 10080 - context: 'iyk3z0r9' - type: 'CICSDefinitionBundle' + context: "iyk3z0r9" + type: "CICSDefinitionBundle" attributes: name: PONGALT bundledir: /u/ibmuser/bundle/pong/pongbundle_1.0.0 csdgroup: JVMGRP create_parameters: - - name: 'csd' + - name: "csd" @@ -263,7 +273,7 @@ Return Values cpsm_reason - | The character value of the REASON code returned by each CICSPlex SM API command. For a list of REASON character values, see https://www.ibm.com/support/knowledgecenter/SSGMCP_5.6.0/reference-system-programming/commands-cpsm/eyup2ky.html. + | The character value of the REASON code returned by each CICSPlex SM API command. For a list of REASON character values, see https://www.ibm.com/docs/en/cics-ts/latest?topic=values-eyuda-reason-in-alphabetical-order. 
| **returned**: success | **type**: str @@ -271,7 +281,7 @@ Return Values cpsm_reason_code - | The numeric value of the REASON code returned by each CICSPlex SM API command. For a list of REASON numeric values, see https://www.ibm.com/support/knowledgecenter/SSGMCP_5.6.0/reference-system-programming/commands-cpsm/eyup2kw.html. + | The numeric value of the REASON code returned by each CICSPlex SM API command. For a list of REASON numeric values, see https://www.ibm.com/docs/en/cics-ts/latest?topic=values-eyuda-reason-in-numerical-order. | **returned**: success | **type**: int @@ -279,7 +289,7 @@ Return Values cpsm_response - | The character value of the RESPONSE code returned by each CICSPlex SM API command. For a list of RESPONSE character values, see https://www.ibm.com/support/knowledgecenter/SSGMCP_5.6.0/reference-system-programming/commands-cpsm/eyup2kx.html. + | The character value of the RESPONSE code returned by each CICSPlex SM API command. For a list of RESPONSE character values, see https://www.ibm.com/docs/en/cics-ts/latest?topic=values-eyuda-response-in-alphabetical-order. | **returned**: success | **type**: str @@ -287,7 +297,7 @@ Return Values cpsm_response_code - | The numeric value of the RESPONSE code returned by each CICSPlex SM API command. For a list of RESPONSE numeric values, see https://www.ibm.com/support/knowledgecenter/SSGMCP_5.6.0/reference-system-programming/commands-cpsm/eyup2kv.html. + | The numeric value of the RESPONSE code returned by each CICSPlex SM API command. For a list of RESPONSE numeric values, see https://www.ibm.com/docs/en/cics-ts/latest?topic=values-eyuda-response-in-numerical-order. | **returned**: success | **type**: str diff --git a/docs/source/modules/cmci_delete.rst b/docs/source/modules/cmci_delete.rst index 484f5dde..5ba2e80f 100644 --- a/docs/source/modules/cmci_delete.rst +++ b/docs/source/modules/cmci_delete.rst @@ -1,9 +1,9 @@ .. ............................................................................... 
-.. © Copyright IBM Corporation 2020 . +.. © Copyright IBM Corporation 2020,2023 . .. Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) . .. ............................................................................... -:github_url: https://github.com/ansible-collections/ibm_zos_cics/blob/dev/plugins/modules/cmci_delete.py +:github_url: https://github.com/ansible-collections/ibm_zos_cics/blob/main/plugins/modules/cmci_delete.py .. _cmci_delete_module: @@ -20,7 +20,7 @@ cmci_delete -- Delete CICS and CICSPlex SM resources Synopsis -------- -- Remove or discard definitional or installed CICS® and CICSPlex® SM resources from CICS regions, by initiating DELETE requests via the CMCI REST API. The CMCI REST API can be configured in CICSPlex SM or stand-alone regions (SMSS). For information about the API, see `CMCI REST API `_. For information about how to compose DELETE requests, see `CMCI DELETE requests `_. +- Remove or discard definitional or installed CICS® and CICSPlex® SM resources from CICS regions, by initiating DELETE requests via the CMCI REST API. The CMCI REST API can be configured in CICSPlex SM or stand-alone regions (SMSS). For information about the API, see \ `CMCI REST API `__\ . For information about how to compose DELETE requests, see \ `CMCI DELETE requests `__\ . @@ -34,11 +34,11 @@ Parameters cmci_cert Location of the PEM-formatted certificate chain file to be used for HTTPS client authentication. - Can also be specified using the environment variable CMCI_CERT. + Can also be specified using the environment variable CMCI\_CERT. - Required if *cmci_key* is specified. + Required if \ :emphasis:`cmci\_key`\ is specified. - Authentication prioritises certificate authentication if *cmci_cert* and *cmci_key* are provided, then basic authentication if *cmci_user* and (cmci_password) are provided, and then unauthenticated if none is provided. 
+ Authentication prioritises certificate authentication if \ :emphasis:`cmci\_cert`\ and \ :emphasis:`cmci\_key`\ are provided, then basic authentication if \ :emphasis:`cmci\_user`\ and \ :emphasis:`cmci\_password`\ are provided, and then unauthenticated if none is provided. | **required**: False @@ -58,11 +58,11 @@ cmci_host cmci_key Location of the PEM-formatted file storing your private key to be used for HTTPS client authentication. - Can also be specified using the environment variable CMCI_KEY. + Can also be specified using the environment variable CMCI\_KEY. - Required if *cmci_cert* is specified. + Required if \ :emphasis:`cmci\_cert`\ is specified. - Authentication prioritises certificate authentication if *cmci_cert* and *cmci_key* are provided, then basic authentication if *cmci_user* and (cmci_password) are provided, and then unauthenticated if none is provided. + Authentication prioritises certificate authentication if \ :emphasis:`cmci\_cert`\ and \ :emphasis:`cmci\_key`\ are provided, then basic authentication if \ :emphasis:`cmci\_user`\ and \ :emphasis:`cmci\_password`\ are provided, and then unauthenticated if none is provided. | **required**: False @@ -71,13 +71,13 @@ cmci_key cmci_password - The password of *cmci_user* to pass HTTP basic authentication. + The password of \ :emphasis:`cmci\_user`\ to pass HTTP basic authentication. - Can also be specified using the environment variable CMCI_PASSWORD. + Can also be specified using the environment variable CMCI\_PASSWORD. - Required if *cmci_user* is specified. + Required if \ :emphasis:`cmci\_user`\ is specified. - Authentication prioritises certificate authentication if *cmci_cert* and *cmci_key* are provided, then basic authentication if *cmci_user* and (cmci_password) are provided, and then unauthenticated if none is provided. 
+ Authentication prioritises certificate authentication if \ :emphasis:`cmci\_cert`\ and \ :emphasis:`cmci\_key`\ are provided, then basic authentication if \ :emphasis:`cmci\_user`\ and \ :emphasis:`cmci\_password`\ are provided, and then unauthenticated if none is provided. | **required**: false @@ -97,11 +97,11 @@ cmci_port cmci_user The user ID under which the CMCI request will run. - Can also be specified using the environment variable CMCI_USER. + Can also be specified using the environment variable CMCI\_USER. - Required if *cmci_password* is specified. + Required if \ :emphasis:`cmci\_password`\ is specified. - Authentication prioritises certificate authentication if *cmci_cert* and *cmci_key* are provided, then basic authentication if *cmci_user* and (cmci_password) are provided, and then unauthenticated if none is provided. + Authentication prioritises certificate authentication if \ :emphasis:`cmci\_cert`\ and \ :emphasis:`cmci\_key`\ are provided, then basic authentication if \ :emphasis:`cmci\_user`\ and \ :emphasis:`cmci\_password`\ are provided, and then unauthenticated if none is provided. | **required**: false @@ -110,11 +110,11 @@ cmci_user context - If CMCI is installed in a CICSPlex® SM environment, *context* is the name of the CICSplex or CMAS associated with the request, for example, ``PLEX1``. To determine whether a CMAS can be specified as *context*, see the **CMAS context** entry in the CICSPlex SM resource table reference of a resource. For example, according to the `PROGRAM resource table `_, CMAS context is not supported for PROGRAM. + If CMCI is installed in a CICSPlex® SM environment, \ :emphasis:`context`\ is the name of the CICSplex or CMAS associated with the request, for example, \ :literal:`PLEX1`\ . To determine whether a CMAS can be specified as \ :emphasis:`context`\ , see the \ :strong:`CMAS context`\ entry in the CICSPlex SM resource table reference of a resource. 
For example, according to the \ `PROGRAM resource table `__\ , CMAS context is not supported for PROGRAM. - If CMCI is installed in a single region (SMSS), *context* is the APPLID of the CICS region associate with the request. + If CMCI is installed in a single region (SMSS), \ :emphasis:`context`\ is the APPLID of the CICS region associated with the request. - The value of *context* must contain no spaces. *context* is not case-sensitive. + The value of \ :emphasis:`context`\ must contain no spaces. \ :emphasis:`context`\ is not case-sensitive. | **required**: True @@ -123,7 +123,7 @@ context insecure - When set to ``true``, disables SSL certificate trust chain verification when using HTTPS. + When set to \ :literal:`true`\ , disables SSL certificate trust chain verification when using HTTPS. | **required**: False @@ -141,17 +141,17 @@ resources complex_filter - A dictionary representing a complex filter expression. Complex filters are composed of filter expressions, represented as dictionaries. Each dictionary can specify either an attribute expression, a list of filter expressions to be composed with the ``and`` operator, or a list of filter expressions to be composed with the ``or`` operator. + A dictionary representing a complex filter expression. Complex filters are composed of filter expressions, represented as dictionaries. Each dictionary can specify either an attribute expression, a list of filter expressions to be composed with the \ :literal:`and`\ operator, or a list of filter expressions to be composed with the \ :literal:`or`\ operator. - The ``attribute``, ``and`` and ``or`` options are mutually exclusive with each other. + The \ :literal:`attribute`\ , \ :literal:`and`\ and \ :literal:`or`\ options are mutually exclusive with each other. - Can contain one or more filters. Multiple filters must be combined using ``and`` or ``or`` logical operators. + Can contain one or more filters.
Multiple filters must be combined using \ :literal:`and`\ or \ :literal:`or`\ logical operators. Filters can be nested. - When supplying the ``attribute`` option, you must also supply a ``value`` for the filter. You can also override the default operator of ``=`` with the ``operator`` option. + When supplying the \ :literal:`attribute`\ option, you must also supply a \ :literal:`value`\ for the filter. You can also override the default operator of \ :literal:`=`\ with the \ :literal:`operator`\ option. - For examples, see "Examples" in :ref:`cmci_get `. + For examples, see "Examples" in \ :ref:`ibm.ibm\_zos\_cics.cmci\_get `\ . | **required**: False @@ -160,9 +160,9 @@ resources and - A list of filter expressions to be combined with an ``and`` operation. + A list of filter expressions to be combined with an \ :literal:`and`\ operation. - Filter expressions are nested ``complex_filter`` elements. Each nested filter expression can be either an ``attribute``, ``and`` or ``or`` complex filter expression. + Filter expressions are nested \ :literal:`complex\_filter`\ elements. Each nested filter expression can be either an \ :literal:`attribute`\ , \ :literal:`and`\ or \ :literal:`or`\ complex filter expression. | **required**: False @@ -173,7 +173,7 @@ resources attribute The name of a resource table attribute on which to filter. - For supported attributes of different resource types, see their resource table reference, for example, `PROGDEF resource table reference `_. + For supported attributes of different resource types, see their resource table reference, for example, \ `PROGDEF resource table reference `__\ . | **required**: False @@ -182,7 +182,7 @@ resources operator - These operators are accepted: ``<`` or ``LT`` (less than), ``<=`` or ``LE`` (less than or equal to), ``=`` or ``EQ`` (equal to), ``>`` or ``GT`` (greater than), ``>=`` or ``GE`` (greater than or equal to), ``==`` or ``IS`` (is), ``¬=``, ``!=``, or ``NE`` (not equal to). 
If not supplied when ``attribute`` is used, ``EQ`` is assumed. + These operators are accepted: \ :literal:`\<`\ or \ :literal:`LT`\ (less than), \ :literal:`\<=`\ or \ :literal:`LE`\ (less than or equal to), \ :literal:`=`\ or \ :literal:`EQ`\ (equal to), \ :literal:`\>`\ or \ :literal:`GT`\ (greater than), \ :literal:`\>=`\ or \ :literal:`GE`\ (greater than or equal to), \ :literal:`==`\ or \ :literal:`IS`\ (is), \ :literal:`¬=`\ , \ :literal:`!=`\ , or \ :literal:`NE`\ (not equal to). If not supplied when \ :literal:`attribute`\ is used, \ :literal:`EQ`\ is assumed. @@ -193,9 +193,9 @@ resources or - A list of filter expressions to be combined with an ``or`` operation. + A list of filter expressions to be combined with an \ :literal:`or`\ operation. - Filter expressions are nested ``complex_filter`` elements. Each nested filter expression can be either an ``attribute``, ``and`` or ``or`` complex filter expression. + Filter expressions are nested \ :literal:`complex\_filter`\ elements. Each nested filter expression can be either an \ :literal:`attribute`\ , \ :literal:`and`\ or \ :literal:`or`\ complex filter expression. | **required**: False @@ -206,7 +206,7 @@ resources value The value by which you are to filter the resource attributes. - The value must be a valid one for the resource table attribute as documented in the resource table reference, for example, `PROGDEF resource table reference `_. + The value must be a valid one for the resource table attribute as documented in the resource table reference, for example, \ `PROGDEF resource table reference `__\ . | **required**: False @@ -218,21 +218,21 @@ resources filter A dictionary with attribute names as keys, and target values, to be used as criteria to filter the set of resources returned from CICSPlex SM. - Filters implicitly use the ``=`` operator. + Filters implicitly use the \ :literal:`=`\ operator. - Filters for ``string`` type attributes can use the ``*`` and ``+`` wildcard operators. 
+ Filters for \ :literal:`string`\ type attributes can use the \ :literal:`\*`\ and \ :literal:`+`\ wildcard operators. - ``*`` is a wildcard representing an unknown number of characters, and must appear at the end of the value. + \ :literal:`\*`\ is a wildcard representing an unknown number of characters, and must appear at the end of the value. - ``+`` is a wildcard representing a single character, and can appear in any place in the value, potentially multiple times. + \ :literal:`+`\ is a wildcard representing a single character, and can appear in any place in the value, potentially multiple times. - To use more complicated filter expressions, including a range of different filter operators, and the ability to compose filters with ``and`` and ``or`` operators, see the ``complex_filter`` parameter. + To use more complicated filter expressions, including a range of different filter operators, and the ability to compose filters with \ :literal:`and`\ and \ :literal:`or`\ operators, see the \ :literal:`complex\_filter`\ parameter. - For more details, see `How to build a filter expression `_. + For more details, see \ `How to build a filter expression `__\ . - For examples, see :ref:`cmci_get ` + For examples, see \ :ref:`ibm.ibm\_zos\_cics.cmci\_get `\ . - For supported attributes of different resource types, see their resource table reference, for example, `PROGDEF resource table reference `_. + For supported attributes of different resource types, see their resource table reference, for example, \ `PROGDEF resource table reference `__\ . | **required**: False @@ -241,7 +241,7 @@ resources get_parameters - A list of one or more parameters with optional values used to identify the resources for this request. Eligible parameters for identifying the target resources can be found in the resource table reference for the target resource type, as valid parameters for the GET operation in the "Valid CPSM operations" table. 
For example, the valid parameters for identifying a PROGDEF resource are CICSSYS, CSDGROUP and RESGROUP, as found in the `PROGDEF resource table reference `_. + A list of one or more parameters with optional values used to identify the resources for this request. Eligible parameters for identifying the target resources can be found in the resource table reference for the target resource type, as valid parameters for the GET operation in the "Valid CPSM operations" table. For example, the valid parameters for identifying a PROGDEF resource are CICSSYS, CSDGROUP and RESGROUP, as found in the \ `PROGDEF resource table reference `__\ . @@ -284,11 +284,11 @@ scheme scope Specifies the name of a CICSplex, CICS region group, CICS region, or logical scope that is associated with the query. - *scope* is a subset of *context* and limits the request to particular CICS systems or resources. + \ :emphasis:`scope`\ is a subset of \ :emphasis:`context`\ and limits the request to particular CICS systems or resources. - *scope* is optional. If it's not specified, the request is limited by the value of *context* alone. + \ :emphasis:`scope`\ is optional. If it's not specified, the request is limited by the value of \ :emphasis:`context`\ alone. - The value of *scope* must contain no spaces. *scope* is not case-sensitive. + The value of \ :emphasis:`scope`\ must contain no spaces. \ :emphasis:`scope`\ is not case-sensitive. | **required**: false @@ -296,8 +296,18 @@ scope +timeout + HTTP request timeout in seconds + + + | **required**: False + | **type**: int + | **default**: 30 + + + type - The CMCI external resource name that maps to the target CICS or CICSPlex SM resource type. For a list of CMCI external resource names, see `CMCI resource names `_. + The CMCI external resource name that maps to the target CICS or CICSPlex SM resource type. For a list of CMCI external resource names, see \ `CMCI resource names `__\ . 
| **required**: True @@ -314,26 +324,26 @@ Examples - name: delete a bundle in a CICS region cmci_delete: - cmci_host: 'winmvs2c.hursley.ibm.com' + cmci_host: "winmvs2c.hursley.ibm.com" cmci_port: 10080 - context: 'iyk3z0r9' + context: "iyk3z0r9" type: CICSBundle resource: filter: - name: 'PONGALT' + name: "PONGALT" - name: delete a bundle definition in a CICS region cmci_delete: - cmci_host: 'winmvs2c.hursley.ibm.com' + cmci_host: "winmvs2c.hursley.ibm.com" cmci_port: 10080 - context: 'iyk3z0r9' + context: "iyk3z0r9" type: CICSDefinitionBundle resource: filter: - name: 'PONGALT' + name: "PONGALT" get_parameters: - - name: 'csdgroup' - value: 'JVMGRP' + - name: "csdgroup" + value: "JVMGRP" @@ -374,7 +384,7 @@ Return Values cpsm_reason - | The character value of the REASON code returned by each CICSPlex SM API command. For a list of REASON character values, see https://www.ibm.com/support/knowledgecenter/SSGMCP_5.6.0/reference-system-programming/commands-cpsm/eyup2ky.html. + | The character value of the REASON code returned by each CICSPlex SM API command. For a list of REASON character values, see https://www.ibm.com/docs/en/cics-ts/latest?topic=values-eyuda-reason-in-alphabetical-order. | **returned**: success | **type**: str @@ -382,7 +392,7 @@ Return Values cpsm_reason_code - | The numeric value of the REASON code returned by each CICSPlex SM API command. For a list of REASON numeric values, see https://www.ibm.com/support/knowledgecenter/SSGMCP_5.6.0/reference-system-programming/commands-cpsm/eyup2kw.html. + | The numeric value of the REASON code returned by each CICSPlex SM API command. For a list of REASON numeric values, see https://www.ibm.com/docs/en/cics-ts/latest?topic=values-eyuda-reason-in-numerical-order. | **returned**: success | **type**: int @@ -390,7 +400,7 @@ Return Values cpsm_response - | The character value of the RESPONSE code returned by each CICSPlex SM API command. 
For a list of RESPONSE character values, see https://www.ibm.com/support/knowledgecenter/SSGMCP_5.6.0/reference-system-programming/commands-cpsm/eyup2kx.html. + | The character value of the RESPONSE code returned by each CICSPlex SM API command. For a list of RESPONSE character values, see https://www.ibm.com/docs/en/cics-ts/latest?topic=values-eyuda-response-in-alphabetical-order. | **returned**: success | **type**: str @@ -398,7 +408,7 @@ Return Values cpsm_response_code - | The numeric value of the RESPONSE code returned by each CICSPlex SM API command. For a list of RESPONSE numeric values, see https://www.ibm.com/support/knowledgecenter/SSGMCP_5.6.0/reference-system-programming/commands-cpsm/eyup2kv.html. + | The numeric value of the RESPONSE code returned by each CICSPlex SM API command. For a list of RESPONSE numeric values, see https://www.ibm.com/docs/en/cics-ts/latest?topic=values-eyuda-response-in-numerical-order. | **returned**: success | **type**: str diff --git a/docs/source/modules/cmci_get.rst b/docs/source/modules/cmci_get.rst index 4fecec25..3d926506 100644 --- a/docs/source/modules/cmci_get.rst +++ b/docs/source/modules/cmci_get.rst @@ -1,9 +1,9 @@ .. ............................................................................... -.. © Copyright IBM Corporation 2020 . +.. © Copyright IBM Corporation 2020,2023 . .. Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) . .. ............................................................................... -:github_url: https://github.com/ansible-collections/ibm_zos_cics/blob/dev/plugins/modules/cmci_get.py +:github_url: https://github.com/ansible-collections/ibm_zos_cics/blob/main/plugins/modules/cmci_get.py .. _cmci_get_module: @@ -20,7 +20,7 @@ cmci_get -- Query CICS and CICSPlex SM resources and definitions Synopsis -------- -- Get information about installed and definitional CICS® and CICSPlex® SM resources from CICS regions, by initiating GET requests via the CMCI REST API. 
The CMCI REST API can be configured in CICSPlex SM or stand-alone regions (SMSS). For information about the API, see `CMCI REST API `_. For information about how to compose GET requests, see `CMCI GET requests `_. +- Get information about installed and definitional CICS® and CICSPlex® SM resources from CICS regions, by initiating GET requests via the CMCI REST API. The CMCI REST API can be configured in CICSPlex SM or stand-alone regions (SMSS). For information about the API, see \ `CMCI REST API `__\ . For information about how to compose GET requests, see \ `CMCI GET requests `__\ . @@ -34,11 +34,11 @@ Parameters cmci_cert Location of the PEM-formatted certificate chain file to be used for HTTPS client authentication. - Can also be specified using the environment variable CMCI_CERT. + Can also be specified using the environment variable CMCI\_CERT. - Required if *cmci_key* is specified. + Required if \ :emphasis:`cmci\_key`\ is specified. - Authentication prioritises certificate authentication if *cmci_cert* and *cmci_key* are provided, then basic authentication if *cmci_user* and (cmci_password) are provided, and then unauthenticated if none is provided. + Authentication prioritises certificate authentication if \ :emphasis:`cmci\_cert`\ and \ :emphasis:`cmci\_key`\ are provided, then basic authentication if \ :emphasis:`cmci\_user`\ and \ :emphasis:`cmci\_password`\ are provided, and then unauthenticated if none is provided. | **required**: False @@ -58,11 +58,11 @@ cmci_host cmci_key Location of the PEM-formatted file storing your private key to be used for HTTPS client authentication. - Can also be specified using the environment variable CMCI_KEY. + Can also be specified using the environment variable CMCI\_KEY. - Required if *cmci_cert* is specified. + Required if \ :emphasis:`cmci\_cert`\ is specified. 
- Authentication prioritises certificate authentication if *cmci_cert* and *cmci_key* are provided, then basic authentication if *cmci_user* and (cmci_password) are provided, and then unauthenticated if none is provided. + Authentication prioritises certificate authentication if \ :emphasis:`cmci\_cert`\ and \ :emphasis:`cmci\_key`\ are provided, then basic authentication if \ :emphasis:`cmci\_user`\ and \ :emphasis:`cmci\_password`\ are provided, and then unauthenticated if none is provided. | **required**: False @@ -71,13 +71,13 @@ cmci_key cmci_password - The password of *cmci_user* to pass HTTP basic authentication. + The password of \ :emphasis:`cmci\_user`\ to pass HTTP basic authentication. - Can also be specified using the environment variable CMCI_PASSWORD. + Can also be specified using the environment variable CMCI\_PASSWORD. - Required if *cmci_user* is specified. + Required if \ :emphasis:`cmci\_user`\ is specified. - Authentication prioritises certificate authentication if *cmci_cert* and *cmci_key* are provided, then basic authentication if *cmci_user* and (cmci_password) are provided, and then unauthenticated if none is provided. + Authentication prioritises certificate authentication if \ :emphasis:`cmci\_cert`\ and \ :emphasis:`cmci\_key`\ are provided, then basic authentication if \ :emphasis:`cmci\_user`\ and \ :emphasis:`cmci\_password`\ are provided, and then unauthenticated if none is provided. | **required**: false @@ -97,11 +97,11 @@ cmci_port cmci_user The user ID under which the CMCI request will run. - Can also be specified using the environment variable CMCI_USER. + Can also be specified using the environment variable CMCI\_USER. - Required if *cmci_password* is specified. + Required if \ :emphasis:`cmci\_password`\ is specified. 
- Authentication prioritises certificate authentication if *cmci_cert* and *cmci_key* are provided, then basic authentication if *cmci_user* and (cmci_password) are provided, and then unauthenticated if none is provided. + Authentication prioritises certificate authentication if \ :emphasis:`cmci\_cert`\ and \ :emphasis:`cmci\_key`\ are provided, then basic authentication if \ :emphasis:`cmci\_user`\ and \ :emphasis:`cmci\_password`\ are provided, and then unauthenticated if none is provided. | **required**: false @@ -110,11 +110,11 @@ cmci_user context - If CMCI is installed in a CICSPlex® SM environment, *context* is the name of the CICSplex or CMAS associated with the request, for example, ``PLEX1``. To determine whether a CMAS can be specified as *context*, see the **CMAS context** entry in the CICSPlex SM resource table reference of a resource. For example, according to the `PROGRAM resource table `_, CMAS context is not supported for PROGRAM. + If CMCI is installed in a CICSPlex® SM environment, \ :emphasis:`context`\ is the name of the CICSplex or CMAS associated with the request, for example, \ :literal:`PLEX1`\ . To determine whether a CMAS can be specified as \ :emphasis:`context`\ , see the \ :strong:`CMAS context`\ entry in the CICSPlex SM resource table reference of a resource. For example, according to the \ `PROGRAM resource table `__\ , CMAS context is not supported for PROGRAM. - If CMCI is installed in a single region (SMSS), *context* is the APPLID of the CICS region associate with the request. + If CMCI is installed in a single region (SMSS), \ :emphasis:`context`\ is the APPLID of the CICS region associated with the request. 
| **required**: True @@ -122,8 +122,20 @@ context +fail_on_nodata + Specifies whether the module should fail if no data is returned by the query. If set to true, the module will fail if no data is returned. + + Default behaviour is for the module to fail if no data is returned. When set to false, the module will return OK, just with no records. + + + | **required**: False + | **type**: bool + | **default**: True + + + insecure - When set to ``true``, disables SSL certificate trust chain verification when using HTTPS. + When set to \ :literal:`true`\ , disables SSL certificate trust chain verification when using HTTPS. | **required**: False @@ -134,7 +146,7 @@ insecure record_count Identifies a subset of records in the results cache, starting either from the first record in the results cache or from the record specified by the index parameter. If not specified, all the records are returned by default. - A negative number indicates a count back from the last record; for example, ``-1`` means the last record, ``-2`` the last record but one, and so on. + A negative number indicates a count back from the last record; for example, \ :literal:`-1`\ means the last record, \ :literal:`-2`\ the last record but one, and so on. The count value must be an integer; a value of zero is not permitted. @@ -154,17 +166,17 @@ resources complex_filter - A dictionary representing a complex filter expression. Complex filters are composed of filter expressions, represented as dictionaries. Each dictionary can specify either an attribute expression, a list of filter expressions to be composed with the ``and`` operator, or a list of filter expressions to be composed with the ``or`` operator. + A dictionary representing a complex filter expression. Complex filters are composed of filter expressions, represented as dictionaries. 
Each dictionary can specify either an attribute expression, a list of filter expressions to be composed with the \ :literal:`and`\ operator, or a list of filter expressions to be composed with the \ :literal:`or`\ operator. - The ``attribute``, ``and`` and ``or`` options are mutually exclusive with each other. + The \ :literal:`attribute`\ , \ :literal:`and`\ and \ :literal:`or`\ options are mutually exclusive with each other. - Can contain one or more filters. Multiple filters must be combined using ``and`` or ``or`` logical operators. + Can contain one or more filters. Multiple filters must be combined using \ :literal:`and`\ or \ :literal:`or`\ logical operators. Filters can be nested. - When supplying the ``attribute`` option, you must also supply a ``value`` for the filter. You can also override the default operator of ``=`` with the ``operator`` option. + When supplying the \ :literal:`attribute`\ option, you must also supply a \ :literal:`value`\ for the filter. You can also override the default operator of \ :literal:`=`\ with the \ :literal:`operator`\ option. - For examples, see "Examples" in :ref:`cmci_get `. + For examples, see "Examples" in \ :ref:`ibm.ibm\_zos\_cics.cmci\_get `\ . | **required**: False @@ -173,9 +185,9 @@ resources and - A list of filter expressions to be combined with an ``and`` operation. + A list of filter expressions to be combined with an \ :literal:`and`\ operation. - Filter expressions are nested ``complex_filter`` elements. Each nested filter expression can be either an ``attribute``, ``and`` or ``or`` complex filter expression. + Filter expressions are nested \ :literal:`complex\_filter`\ elements. Each nested filter expression can be either an \ :literal:`attribute`\ , \ :literal:`and`\ or \ :literal:`or`\ complex filter expression. | **required**: False @@ -186,7 +198,7 @@ resources attribute The name of a resource table attribute on which to filter. 
- For supported attributes of different resource types, see their resource table reference, for example, `PROGDEF resource table reference `_. + For supported attributes of different resource types, see their resource table reference, for example, \ `PROGDEF resource table reference `__\ . | **required**: False @@ -195,7 +207,7 @@ resources operator - These operators are accepted: ``<`` or ``LT`` (less than), ``<=`` or ``LE`` (less than or equal to), ``=`` or ``EQ`` (equal to), ``>`` or ``GT`` (greater than), ``>=`` or ``GE`` (greater than or equal to), ``==`` or ``IS`` (is), ``¬=``, ``!=``, or ``NE`` (not equal to). If not supplied when ``attribute`` is used, ``EQ`` is assumed. + These operators are accepted: \ :literal:`\<`\ or \ :literal:`LT`\ (less than), \ :literal:`\<=`\ or \ :literal:`LE`\ (less than or equal to), \ :literal:`=`\ or \ :literal:`EQ`\ (equal to), \ :literal:`\>`\ or \ :literal:`GT`\ (greater than), \ :literal:`\>=`\ or \ :literal:`GE`\ (greater than or equal to), \ :literal:`==`\ or \ :literal:`IS`\ (is), \ :literal:`¬=`\ , \ :literal:`!=`\ , or \ :literal:`NE`\ (not equal to). If not supplied when \ :literal:`attribute`\ is used, \ :literal:`EQ`\ is assumed. @@ -206,9 +218,9 @@ resources or - A list of filter expressions to be combined with an ``or`` operation. + A list of filter expressions to be combined with an \ :literal:`or`\ operation. - Filter expressions are nested ``complex_filter`` elements. Each nested filter expression can be either an ``attribute``, ``and`` or ``or`` complex filter expression. + Filter expressions are nested \ :literal:`complex\_filter`\ elements. Each nested filter expression can be either an \ :literal:`attribute`\ , \ :literal:`and`\ or \ :literal:`or`\ complex filter expression. | **required**: False @@ -219,7 +231,7 @@ resources value The value by which you are to filter the resource attributes. 
- The value must be a valid one for the resource table attribute as documented in the resource table reference, for example, `PROGDEF resource table reference `_. + The value must be a valid one for the resource table attribute as documented in the resource table reference, for example, \ `PROGDEF resource table reference `__\ . | **required**: False @@ -231,21 +243,21 @@ resources filter A dictionary with attribute names as keys, and target values, to be used as criteria to filter the set of resources returned from CICSPlex SM. - Filters implicitly use the ``=`` operator. + Filters implicitly use the \ :literal:`=`\ operator. - Filters for ``string`` type attributes can use the ``*`` and ``+`` wildcard operators. + Filters for \ :literal:`string`\ type attributes can use the \ :literal:`\*`\ and \ :literal:`+`\ wildcard operators. - ``*`` is a wildcard representing an unknown number of characters, and must appear at the end of the value. + \ :literal:`\*`\ is a wildcard representing an unknown number of characters, and must appear at the end of the value. - ``+`` is a wildcard representing a single character, and can appear in any place in the value, potentially multiple times. + \ :literal:`+`\ is a wildcard representing a single character, and can appear in any place in the value, potentially multiple times. - To use more complicated filter expressions, including a range of different filter operators, and the ability to compose filters with ``and`` and ``or`` operators, see the ``complex_filter`` parameter. + To use more complicated filter expressions, including a range of different filter operators, and the ability to compose filters with \ :literal:`and`\ and \ :literal:`or`\ operators, see the \ :literal:`complex\_filter`\ parameter. - For more details, see `How to build a filter expression `_. + For more details, see \ `How to build a filter expression `__\ . - For examples, see :ref:`cmci_get ` + For examples, see \ :ref:`ibm.ibm\_zos\_cics.cmci\_get `\ . 
- For supported attributes of different resource types, see their resource table reference, for example, `PROGDEF resource table reference `_. + For supported attributes of different resource types, see their resource table reference, for example, \ `PROGDEF resource table reference `__\ . | **required**: False @@ -254,7 +266,7 @@ resources get_parameters - A list of one or more parameters with optional values used to identify the resources for this request. Eligible parameters for identifying the target resources can be found in the resource table reference for the target resource type, as valid parameters for the GET operation in the "Valid CPSM operations" table. For example, the valid parameters for identifying a PROGDEF resource are CICSSYS, CSDGROUP and RESGROUP, as found in the `PROGDEF resource table reference `_. + A list of one or more parameters with optional values used to identify the resources for this request. Eligible parameters for identifying the target resources can be found in the resource table reference for the target resource type, as valid parameters for the GET operation in the "Valid CPSM operations" table. For example, the valid parameters for identifying a PROGDEF resource are CICSSYS, CSDGROUP and RESGROUP, as found in the \ `PROGDEF resource table reference `__\ . @@ -297,11 +309,11 @@ scheme scope Specifies the name of a CICSplex, CICS region group, CICS region, or logical scope that is associated with the query. - *scope* is a subset of *context* and limits the request to particular CICS systems or resources. + \ :emphasis:`scope`\ is a subset of \ :emphasis:`context`\ and limits the request to particular CICS systems or resources. - *scope* is optional. If it's not specified, the request is limited by the value of *context* alone. + \ :emphasis:`scope`\ is optional. If it's not specified, the request is limited by the value of \ :emphasis:`context`\ alone. - The value of *scope* must contain no spaces. *scope* is not case-sensitive. 
+ The value of \ :emphasis:`scope`\ must contain no spaces. \ :emphasis:`scope`\ is not case-sensitive. | **required**: false @@ -309,8 +321,18 @@ scope +timeout + HTTP request timeout in seconds + + + | **required**: False + | **type**: int + | **default**: 30 + + + type - The CMCI external resource name that maps to the target CICS or CICSPlex SM resource type. For a list of CMCI external resource names, see `CMCI resource names `_. + The CMCI external resource name that maps to the target CICS or CICSPlex SM resource type. For a list of CMCI external resource names, see \ `CMCI resource names `__\ . | **required**: True @@ -327,38 +349,38 @@ Examples - name: get 2 LOCFILEs from a CICSplex cmci_get: - cmci_host: 'winmvs2c.hursley.ibm.com' + cmci_host: "winmvs2c.hursley.ibm.com" cmci_port: 10080 - cmci_user: 'ibmuser' - cmci_password: '123456' - context: 'iyk3z0r9' - type: CICSLocalFile + cmci_user: "ibmuser" + cmci_password: "123456" + context: "iyk3z0r9" + type: CICSLocalFile record_count: 2 resource: filter: - dsname: 'CTS*' + dsname: "CTS*" - name: get a localfile in a CICS region cmci_get: - cmci_host: 'winmvs2c.hursley.ibm.com' + cmci_host: "winmvs2c.hursley.ibm.com" cmci_port: 10080 - cmci_cert: './sec/ansible.pem' - cmci_key: './sec/ansible.key' - context: 'iyk3z0r9' - type: 'CICSLocalFile' + cmci_cert: "./sec/ansible.pem" + cmci_key: "./sec/ansible.key" + context: "iyk3z0r9" + type: "CICSLocalFile" resources: filter: - dsname: 'XIAOPIN*' - file: 'DFH*' + dsname: "XIAOPIN*" + file: "DFH*" record_count: 1 - name: get a progdef from a CSD cmci_get: - cmci_host: 'winmvs2c.hursley.ibm.com' + cmci_host: "winmvs2c.hursley.ibm.com" cmci_port: 10080 - cmci_cert: './sec/ansible.pem' - cmci_key: './sec/ansible.key' - context: 'iyk3z0r9' + cmci_cert: "./sec/ansible.pem" + cmci_key: "./sec/ansible.key" + context: "iyk3z0r9" type: cicsdefinitionprogram resources: filter: @@ -368,25 +390,38 @@ Examples value: MYGRP record_count: 1 + - name: pass module even if bundle 
definition is not found + cmci_get: + cmci_host: "winmvs2c.hursley.ibm.com" + cmci_port: 10080 + cmci_cert: "./sec/ansible.pem" + cmci_key: "./sec/ansible.key" + context: "iyk3z0r9" + type: cicsdefinitionbundle + resources: + filter: + name: MYBUNDLE + get_parameters: + - name: csdgroup + value: MYGRP + record_count: 1 + fail_on_nodata: "false" + - name: Using complex_filter to combine filter expressions and change operators cmci_get: - cmci_host: 'winmvs2c.hursley.ibm.com' + cmci_host: "winmvs2c.hursley.ibm.com" cmci_port: 10080 - cmci_cert: './sec/ansible.pem' - cmci_key: './sec/ansible.key' - context: 'iyk3z0r9' - type: 'CICSRegion' + cmci_cert: "./sec/ansible.pem" + cmci_key: "./sec/ansible.key" + context: "iyk3z0r9" + type: "CICSRegion" resources: complex_filter: - or: [{ - attribute: 'currtasks', - value: '10', - operator: '<' - }, { - attribute: 'currtasks', - value: '100', - operator: '>' - }] + or: + [ + { attribute: "currtasks", value: "10", operator: "<" }, + { attribute: "currtasks", value: "100", operator: ">" }, + ] record_count: 1 @@ -428,7 +463,7 @@ Return Values cpsm_reason - | The character value of the REASON code returned by each CICSPlex SM API command. For a list of REASON character values, see https://www.ibm.com/support/knowledgecenter/SSGMCP_5.6.0/reference-system-programming/commands-cpsm/eyup2ky.html. + | The character value of the REASON code returned by each CICSPlex SM API command. For a list of REASON character values, see https://www.ibm.com/docs/en/cics-ts/latest?topic=values-eyuda-reason-in-alphabetical-order. | **returned**: success | **type**: str @@ -436,7 +471,7 @@ Return Values cpsm_reason_code - | The numeric value of the REASON code returned by each CICSPlex SM API command. For a list of REASON numeric values, see https://www.ibm.com/support/knowledgecenter/SSGMCP_5.6.0/reference-system-programming/commands-cpsm/eyup2kw.html. + | The numeric value of the REASON code returned by each CICSPlex SM API command. 
For a list of REASON numeric values, see https://www.ibm.com/docs/en/cics-ts/latest?topic=values-eyuda-reason-in-numerical-order. | **returned**: success | **type**: int @@ -444,7 +479,7 @@ Return Values cpsm_response - | The character value of the RESPONSE code returned by each CICSPlex SM API command. For a list of RESPONSE character values, see https://www.ibm.com/support/knowledgecenter/SSGMCP_5.6.0/reference-system-programming/commands-cpsm/eyup2kx.html. + | The character value of the RESPONSE code returned by each CICSPlex SM API command. For a list of RESPONSE character values, see https://www.ibm.com/docs/en/cics-ts/latest?topic=values-eyuda-response-in-alphabetical-order. | **returned**: success | **type**: str @@ -452,7 +487,7 @@ Return Values cpsm_response_code - | The numeric value of the RESPONSE code returned by each CICSPlex SM API command. For a list of RESPONSE numeric values, see https://www.ibm.com/support/knowledgecenter/SSGMCP_5.6.0/reference-system-programming/commands-cpsm/eyup2kv.html. + | The numeric value of the RESPONSE code returned by each CICSPlex SM API command. For a list of RESPONSE numeric values, see https://www.ibm.com/docs/en/cics-ts/latest?topic=values-eyuda-response-in-numerical-order. | **returned**: success | **type**: str diff --git a/docs/source/modules/cmci_update.rst b/docs/source/modules/cmci_update.rst index 2d577e41..2aaf20d0 100644 --- a/docs/source/modules/cmci_update.rst +++ b/docs/source/modules/cmci_update.rst @@ -1,9 +1,9 @@ .. ............................................................................... -.. © Copyright IBM Corporation 2020 . +.. © Copyright IBM Corporation 2020,2023 . .. Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) . .. ............................................................................... 
-:github_url: https://github.com/ansible-collections/ibm_zos_cics/blob/dev/plugins/modules/cmci_update.py +:github_url: https://github.com/ansible-collections/ibm_zos_cics/blob/main/plugins/modules/cmci_update.py .. _cmci_update_module: @@ -20,7 +20,7 @@ cmci_update -- Update CICS and CICSPlex resources and definitions Synopsis -------- -- Make changes to CICS® and CICSPlex® SM resources in CICS regions, by initiating PUT requests via the CMCI REST API. The CMCI REST API can be configured in CICSPlex SM or stand-alone regions (SMSS). For information about the API, see `CMCI REST API `_. For information about how to compose PUT requests, see `CMCI PUT requests `_. +- Make changes to CICS® and CICSPlex® SM resources in CICS regions, by initiating PUT requests via the CMCI REST API. The CMCI REST API can be configured in CICSPlex SM or stand-alone regions (SMSS). For information about the API, see \ `CMCI REST API `__\ . For information about how to compose PUT requests, see \ `CMCI PUT requests `__\ . @@ -32,7 +32,7 @@ Parameters attributes - The resource attributes to be created or updated. Available attributes can be found in the CICSPlex® SM resource table reference for the target resource type, for example, `PROGDEF resource table reference `_. + The resource attributes to be created or updated. Available attributes can be found in the CICSPlex® SM resource table reference for the target resource type, for example, \ `PROGDEF resource table reference `__\ . | **required**: False @@ -43,11 +43,11 @@ attributes cmci_cert Location of the PEM-formatted certificate chain file to be used for HTTPS client authentication. - Can also be specified using the environment variable CMCI_CERT. + Can also be specified using the environment variable CMCI\_CERT. - Required if *cmci_key* is specified. + Required if \ :emphasis:`cmci\_key`\ is specified. 
- Authentication prioritises certificate authentication if *cmci_cert* and *cmci_key* are provided, then basic authentication if *cmci_user* and (cmci_password) are provided, and then unauthenticated if none is provided. + Authentication prioritises certificate authentication if \ :emphasis:`cmci\_cert`\ and \ :emphasis:`cmci\_key`\ are provided, then basic authentication if \ :emphasis:`cmci\_user`\ and \ :emphasis:`cmci\_password`\ are provided, and then unauthenticated if none is provided. | **required**: False @@ -67,11 +67,11 @@ cmci_host cmci_key Location of the PEM-formatted file storing your private key to be used for HTTPS client authentication. - Can also be specified using the environment variable CMCI_KEY. + Can also be specified using the environment variable CMCI\_KEY. - Required if *cmci_cert* is specified. + Required if \ :emphasis:`cmci\_cert`\ is specified. - Authentication prioritises certificate authentication if *cmci_cert* and *cmci_key* are provided, then basic authentication if *cmci_user* and (cmci_password) are provided, and then unauthenticated if none is provided. + Authentication prioritises certificate authentication if \ :emphasis:`cmci\_cert`\ and \ :emphasis:`cmci\_key`\ are provided, then basic authentication if \ :emphasis:`cmci\_user`\ and \ :emphasis:`cmci\_password`\ are provided, and then unauthenticated if none is provided. | **required**: False @@ -80,13 +80,13 @@ cmci_key cmci_password - The password of *cmci_user* to pass HTTP basic authentication. + The password of \ :emphasis:`cmci\_user`\ to pass HTTP basic authentication. - Can also be specified using the environment variable CMCI_PASSWORD. + Can also be specified using the environment variable CMCI\_PASSWORD. - Required if *cmci_user* is specified. + Required if \ :emphasis:`cmci\_user`\ is specified. 
- Authentication prioritises certificate authentication if *cmci_cert* and *cmci_key* are provided, then basic authentication if *cmci_user* and (cmci_password) are provided, and then unauthenticated if none is provided. + Authentication prioritises certificate authentication if \ :emphasis:`cmci\_cert`\ and \ :emphasis:`cmci\_key`\ are provided, then basic authentication if \ :emphasis:`cmci\_user`\ and \ :emphasis:`cmci\_password`\ are provided, and then unauthenticated if none is provided. | **required**: false @@ -106,11 +106,11 @@ cmci_port cmci_user The user ID under which the CMCI request will run. - Can also be specified using the environment variable CMCI_USER. + Can also be specified using the environment variable CMCI\_USER. - Required if *cmci_password* is specified. + Required if \ :emphasis:`cmci\_password`\ is specified. - Authentication prioritises certificate authentication if *cmci_cert* and *cmci_key* are provided, then basic authentication if *cmci_user* and (cmci_password) are provided, and then unauthenticated if none is provided. + Authentication prioritises certificate authentication if \ :emphasis:`cmci\_cert`\ and \ :emphasis:`cmci\_key`\ are provided, then basic authentication if \ :emphasis:`cmci\_user`\ and \ :emphasis:`cmci\_password`\ are provided, and then unauthenticated if none is provided. | **required**: false @@ -119,11 +119,11 @@ cmci_user context - If CMCI is installed in a CICSPlex® SM environment, *context* is the name of the CICSplex or CMAS associated with the request, for example, ``PLEX1``. To determine whether a CMAS can be specified as *context*, see the **CMAS context** entry in the CICSPlex SM resource table reference of a resource. For example, according to the `PROGRAM resource table `_, CMAS context is not supported for PROGRAM. + If CMCI is installed in a CICSPlex® SM environment, \ :emphasis:`context`\ is the name of the CICSplex or CMAS associated with the request, for example, \ :literal:`PLEX1`\ . 
To determine whether a CMAS can be specified as \ :emphasis:`context`\ , see the \ :strong:`CMAS context`\ entry in the CICSPlex SM resource table reference of a resource. For example, according to the \ `PROGRAM resource table `__\ , CMAS context is not supported for PROGRAM. - If CMCI is installed in a single region (SMSS), *context* is the APPLID of the CICS region associate with the request. + If CMCI is installed in a single region (SMSS), \ :emphasis:`context`\ is the APPLID of the CICS region associated with the request. - The value of *context* must contain no spaces. *context* is not case-sensitive. + The value of \ :emphasis:`context`\ must contain no spaces. \ :emphasis:`context`\ is not case-sensitive. | **required**: True @@ -132,7 +132,7 @@ context insecure - When set to ``true``, disables SSL certificate trust chain verification when using HTTPS. + When set to \ :literal:`true`\ , disables SSL certificate trust chain verification when using HTTPS. | **required**: False @@ -150,17 +150,17 @@ resources complex_filter - A dictionary representing a complex filter expression. Complex filters are composed of filter expressions, represented as dictionaries. Each dictionary can specify either an attribute expression, a list of filter expressions to be composed with the ``and`` operator, or a list of filter expressions to be composed with the ``or`` operator. + A dictionary representing a complex filter expression. Complex filters are composed of filter expressions, represented as dictionaries. Each dictionary can specify either an attribute expression, a list of filter expressions to be composed with the \ :literal:`and`\ operator, or a list of filter expressions to be composed with the \ :literal:`or`\ operator. - The ``attribute``, ``and`` and ``or`` options are mutually exclusive with each other. + The \ :literal:`attribute`\ , \ :literal:`and`\ and \ :literal:`or`\ options are mutually exclusive with each other. - Can contain one or more filters. 
Multiple filters must be combined using ``and`` or ``or`` logical operators. + Can contain one or more filters. Multiple filters must be combined using \ :literal:`and`\ or \ :literal:`or`\ logical operators. Filters can be nested. - When supplying the ``attribute`` option, you must also supply a ``value`` for the filter. You can also override the default operator of ``=`` with the ``operator`` option. + When supplying the \ :literal:`attribute`\ option, you must also supply a \ :literal:`value`\ for the filter. You can also override the default operator of \ :literal:`=`\ with the \ :literal:`operator`\ option. - For examples, see "Examples" in :ref:`cmci_get `. + For examples, see "Examples" in \ :ref:`ibm.ibm\_zos\_cics.cmci\_get `\ . | **required**: False @@ -169,9 +169,9 @@ resources and - A list of filter expressions to be combined with an ``and`` operation. + A list of filter expressions to be combined with an \ :literal:`and`\ operation. - Filter expressions are nested ``complex_filter`` elements. Each nested filter expression can be either an ``attribute``, ``and`` or ``or`` complex filter expression. + Filter expressions are nested \ :literal:`complex\_filter`\ elements. Each nested filter expression can be either an \ :literal:`attribute`\ , \ :literal:`and`\ or \ :literal:`or`\ complex filter expression. | **required**: False @@ -182,7 +182,7 @@ resources attribute The name of a resource table attribute on which to filter. - For supported attributes of different resource types, see their resource table reference, for example, `PROGDEF resource table reference `_. + For supported attributes of different resource types, see their resource table reference, for example, \ `PROGDEF resource table reference `__\ . 
| **required**: False @@ -191,7 +191,7 @@ resources operator - These operators are accepted: ``<`` or ``LT`` (less than), ``<=`` or ``LE`` (less than or equal to), ``=`` or ``EQ`` (equal to), ``>`` or ``GT`` (greater than), ``>=`` or ``GE`` (greater than or equal to), ``==`` or ``IS`` (is), ``¬=``, ``!=``, or ``NE`` (not equal to). If not supplied when ``attribute`` is used, ``EQ`` is assumed. + These operators are accepted: \ :literal:`\<`\ or \ :literal:`LT`\ (less than), \ :literal:`\<=`\ or \ :literal:`LE`\ (less than or equal to), \ :literal:`=`\ or \ :literal:`EQ`\ (equal to), \ :literal:`\>`\ or \ :literal:`GT`\ (greater than), \ :literal:`\>=`\ or \ :literal:`GE`\ (greater than or equal to), \ :literal:`==`\ or \ :literal:`IS`\ (is), \ :literal:`¬=`\ , \ :literal:`!=`\ , or \ :literal:`NE`\ (not equal to). If not supplied when \ :literal:`attribute`\ is used, \ :literal:`EQ`\ is assumed. @@ -202,9 +202,9 @@ resources or - A list of filter expressions to be combined with an ``or`` operation. + A list of filter expressions to be combined with an \ :literal:`or`\ operation. - Filter expressions are nested ``complex_filter`` elements. Each nested filter expression can be either an ``attribute``, ``and`` or ``or`` complex filter expression. + Filter expressions are nested \ :literal:`complex\_filter`\ elements. Each nested filter expression can be either an \ :literal:`attribute`\ , \ :literal:`and`\ or \ :literal:`or`\ complex filter expression. | **required**: False @@ -215,7 +215,7 @@ resources value The value by which you are to filter the resource attributes. - The value must be a valid one for the resource table attribute as documented in the resource table reference, for example, `PROGDEF resource table reference `_. + The value must be a valid one for the resource table attribute as documented in the resource table reference, for example, \ `PROGDEF resource table reference `__\ . 
| **required**: False @@ -227,21 +227,21 @@ resources filter A dictionary with attribute names as keys, and target values, to be used as criteria to filter the set of resources returned from CICSPlex SM. - Filters implicitly use the ``=`` operator. + Filters implicitly use the \ :literal:`=`\ operator. - Filters for ``string`` type attributes can use the ``*`` and ``+`` wildcard operators. + Filters for \ :literal:`string`\ type attributes can use the \ :literal:`\*`\ and \ :literal:`+`\ wildcard operators. - ``*`` is a wildcard representing an unknown number of characters, and must appear at the end of the value. + \ :literal:`\*`\ is a wildcard representing an unknown number of characters, and must appear at the end of the value. - ``+`` is a wildcard representing a single character, and can appear in any place in the value, potentially multiple times. + \ :literal:`+`\ is a wildcard representing a single character, and can appear in any place in the value, potentially multiple times. - To use more complicated filter expressions, including a range of different filter operators, and the ability to compose filters with ``and`` and ``or`` operators, see the ``complex_filter`` parameter. + To use more complicated filter expressions, including a range of different filter operators, and the ability to compose filters with \ :literal:`and`\ and \ :literal:`or`\ operators, see the \ :literal:`complex\_filter`\ parameter. - For more details, see `How to build a filter expression `_. + For more details, see \ `How to build a filter expression `__\ . - For examples, see :ref:`cmci_get ` + For examples, see \ :ref:`ibm.ibm\_zos\_cics.cmci\_get `\ . - For supported attributes of different resource types, see their resource table reference, for example, `PROGDEF resource table reference `_. + For supported attributes of different resource types, see their resource table reference, for example, \ `PROGDEF resource table reference `__\ . 
| **required**: False @@ -250,7 +250,7 @@ resources get_parameters - A list of one or more parameters with optional values used to identify the resources for this request. Eligible parameters for identifying the target resources can be found in the resource table reference for the target resource type, as valid parameters for the GET operation in the "Valid CPSM operations" table. For example, the valid parameters for identifying a PROGDEF resource are CICSSYS, CSDGROUP and RESGROUP, as found in the `PROGDEF resource table reference `_. + A list of one or more parameters with optional values used to identify the resources for this request. Eligible parameters for identifying the target resources can be found in the resource table reference for the target resource type, as valid parameters for the GET operation in the "Valid CPSM operations" table. For example, the valid parameters for identifying a PROGDEF resource are CICSSYS, CSDGROUP and RESGROUP, as found in the \ `PROGDEF resource table reference `__\ . @@ -293,11 +293,11 @@ scheme scope Specifies the name of a CICSplex, CICS region group, CICS region, or logical scope that is associated with the query. - *scope* is a subset of *context* and limits the request to particular CICS systems or resources. + \ :emphasis:`scope`\ is a subset of \ :emphasis:`context`\ and limits the request to particular CICS systems or resources. - *scope* is optional. If it's not specified, the request is limited by the value of *context* alone. + \ :emphasis:`scope`\ is optional. If it's not specified, the request is limited by the value of \ :emphasis:`context`\ alone. - The value of *scope* must contain no spaces. *scope* is not case-sensitive. + The value of \ :emphasis:`scope`\ must contain no spaces. \ :emphasis:`scope`\ is not case-sensitive. 
| **required**: false @@ -305,8 +305,18 @@ scope +timeout + HTTP request timeout in seconds + + + | **required**: False + | **type**: int + | **default**: 30 + + + type - The CMCI external resource name that maps to the target CICS or CICSPlex SM resource type. For a list of CMCI external resource names, see `CMCI resource names `_. + The CMCI external resource name that maps to the target CICS or CICSPlex SM resource type. For a list of CMCI external resource names, see \ `CMCI resource names `__\ . | **required**: True @@ -315,7 +325,7 @@ type update_parameters - A list of one or more parameters that control the *update* operation. Eligible parameters for the UPDATE operation can be found in the resource table reference for the target resource type, as listed in the UPDATE operation section of the "Valid CPSM operations" table. For example, the only valid parameter for a PROGDEF UPDATE operation is CSD, as found in the `PROGDEF resource table reference `_. + A list of one or more parameters that control the \ :emphasis:`update`\ operation. Eligible parameters for the UPDATE operation can be found in the resource table reference for the target resource type, as listed in the UPDATE operation section of the "Valid CPSM operations" table. For example, the only valid parameter for a PROGDEF UPDATE operation is CSD, as found in the \ `PROGDEF resource table reference `__\ . 
@@ -352,17 +362,17 @@ Examples - name: update a bundle definition in a CICS region cmci_update: - cmci_host: 'winmvs2c.hursley.ibm.com' - cmci_port: '10080' - context: 'iyk3z0r9' - type: 'CICSDefinitionBundle' + cmci_host: "winmvs2c.hursley.ibm.com" + cmci_port: "10080" + context: "iyk3z0r9" + type: "CICSDefinitionBundle" attributes: - description: 'New description' + description: "New description" update_parameters: - name: csd resource: filter: - name: 'PONGALT' + name: "PONGALT" get_parameters: - name: csdgroup value: JVMGRP @@ -406,7 +416,7 @@ Return Values cpsm_reason - | The character value of the REASON code returned by each CICSPlex SM API command. For a list of REASON character values, see https://www.ibm.com/support/knowledgecenter/SSGMCP_5.6.0/reference-system-programming/commands-cpsm/eyup2ky.html. + | The character value of the REASON code returned by each CICSPlex SM API command. For a list of REASON character values, see https://www.ibm.com/docs/en/cics-ts/latest?topic=values-eyuda-reason-in-alphabetical-order. | **returned**: success | **type**: str @@ -414,7 +424,7 @@ Return Values cpsm_reason_code - | The numeric value of the REASON code returned by each CICSPlex SM API command. For a list of REASON numeric values, see https://www.ibm.com/support/knowledgecenter/SSGMCP_5.6.0/reference-system-programming/commands-cpsm/eyup2kw.html. + | The numeric value of the REASON code returned by each CICSPlex SM API command. For a list of REASON numeric values, see https://www.ibm.com/docs/en/cics-ts/latest?topic=values-eyuda-reason-in-numerical-order. | **returned**: success | **type**: int @@ -422,7 +432,7 @@ Return Values cpsm_response - | The character value of the RESPONSE code returned by each CICSPlex SM API command. For a list of RESPONSE character values, see https://www.ibm.com/support/knowledgecenter/SSGMCP_5.6.0/reference-system-programming/commands-cpsm/eyup2kx.html. 
+ | The character value of the RESPONSE code returned by each CICSPlex SM API command. For a list of RESPONSE character values, see https://www.ibm.com/docs/en/cics-ts/latest?topic=values-eyuda-response-in-alphabetical-order. | **returned**: success | **type**: str @@ -430,7 +440,7 @@ Return Values cpsm_response_code - | The numeric value of the RESPONSE code returned by each CICSPlex SM API command. For a list of RESPONSE numeric values, see https://www.ibm.com/support/knowledgecenter/SSGMCP_5.6.0/reference-system-programming/commands-cpsm/eyup2kv.html. + | The numeric value of the RESPONSE code returned by each CICSPlex SM API command. For a list of RESPONSE numeric values, see https://www.ibm.com/docs/en/cics-ts/latest?topic=values-eyuda-response-in-numerical-order. | **returned**: success | **type**: str diff --git a/docs/source/modules/csd.rst b/docs/source/modules/csd.rst new file mode 100644 index 00000000..20e624aa --- /dev/null +++ b/docs/source/modules/csd.rst @@ -0,0 +1,481 @@ +.. ............................................................................... +.. © Copyright IBM Corporation 2020,2023 . +.. Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) . +.. ............................................................................... + +:github_url: https://github.com/ansible-collections/ibm_zos_cics/blob/main/plugins/modules/csd.py + +.. _csd_module: + + +csd -- Create, remove, and manage the CICS CSD +============================================== + + + +.. contents:: + :local: + :depth: 1 + + +Synopsis +-------- +- Create, remove, and manage the \ `CICS system definition data set `__\ (CSD) used by a CICS® region. +- You can use this module when provisioning or de-provisioning a CICS region, or when managing the state of the CSD during upgrades or restarts. +- Use the \ :literal:`state`\ option to specify the intended state for the CSD. 
For example, use \ :literal:`state=initial`\ to create and initialize a CSD if it doesn't exist, or empty an existing CSD of all records. + + + + + +Parameters +---------- + + + +cics_data_sets + The name of the \ :literal:`SDFHLOAD`\ library of the CICS installation, for example, \ :literal:`CICSTS61.CICS.SDFHLOAD`\ . + + + | **required**: True + | **type**: dict + + + + sdfhload + The location of the \ :literal:`SDFHLOAD`\ library. If \ :literal:`cics\_data\_sets.template`\ is provided, this value overrides the template. + + + | **required**: False + | **type**: str + + + + template + The templated location of the \ :literal:`SDFHLOAD`\ library. + + + | **required**: False + | **type**: str + + + + +input_content + The content of the DFHCSDUP script to submit, if you are using the \ :literal:`input\_location=INLINE`\ option. + + + | **required**: False + | **type**: str + + + +input_location + The type of location from which to load the DFHCSDUP script. + + Specify \ :literal:`DATA\_SET`\ to load from a PDS, PDSE, or sequential data set. + + Specify \ :literal:`USS`\ to load from a file on UNIX System Services (USS). + + Specify \ :literal:`LOCAL`\ to load from a file local to the Ansible control node. + + Specify \ :literal:`INLINE`\ to allow a script to be passed directly through the \ :literal:`input\_content`\ parameter. + + + | **required**: False + | **type**: str + | **default**: DATA_SET + | **choices**: DATA_SET, USS, LOCAL, INLINE + + + +input_src + The path to the source file that contains the DFHCSDUP script to submit. + + It can be a data set. For example: "TESTER.DEFS.SCRIPT" or "TESTER.DEFS(SCRIPT)" + + It can be a USS file. For example: "/u/tester/defs/script.csdup" + + It can be a local file. For example: "/User/tester/defs/script.csdup" + + + | **required**: False + | **type**: str + + + +log + Specify the recovery attribute for the CSD, overriding the CSD system initialization parameters. + + Specify NONE for a nonrecoverable CSD. 
+ + Specify UNDO for a CSD that is limited to file backout only. + + Specify ALL for a CSD for which you want both forward recovery and file backout. If you specify \ :literal:`log=ALL`\ , you must also specify LOGSTREAMID to identify the 26-character name of the z/OS™ log stream to be used as the forward recovery log. The CICS collection does not support defining forward recovery log streams; you must follow the instructions in \ `Defining forward recovery log streams `__\ . + + + | **required**: False + | **type**: str + | **choices**: NONE, UNDO, ALL + + + +logstream_id + The 26-character name of the z/OS™ log stream to be used as the forward recovery log. + + This is required when you use \ :literal:`log=ALL`\ . + + + | **required**: False + | **type**: str + + + +region_data_sets + The location of the region data sets to be created by using a template, for example, \ :literal:`REGIONS.ABCD0001.\<\< data\_set\_name \>\>`\ . + + + | **required**: True + | **type**: dict + + + + dfhcsd + Overrides the templated location for the CSD. + + + | **required**: False + | **type**: dict + + + + dsn + The data set name of the CSD to override the template. + + + | **required**: False + | **type**: str + + + + + template + The base location of the region data sets with a template. + + + | **required**: False + | **type**: str + + + + +space_primary + The size of the primary space allocated to the CSD. Note that this is just the value; the unit is specified with \ :literal:`space\_type`\ . + + This option takes effect only when the CSD is being created. If the CSD already exists, the option has no effect. + + + | **required**: False + | **type**: int + | **default**: 4 + + + +space_secondary + The size of the secondary space allocated to the CSD. Note that this is just the value; the unit is specified with \ :literal:`space\_type`\ . + + This option takes effect only when the CSD is being created. If the CSD already exists, the option has no effect. 
+ + + | **required**: False + | **type**: int + | **default**: 1 + + + +space_type + The unit portion of the CSD size. Note that this is just the unit; the value for the primary space is specified with \ :literal:`space\_primary`\ and the value for the secondary space is specified with \ :literal:`space\_secondary`\ . + + This option takes effect only when the CSD is being created. If the CSD already exists, the option has no effect. + + The size can be specified in megabytes (\ :literal:`M`\ ), kilobytes (\ :literal:`K`\ ), records (\ :literal:`REC`\ ), cylinders (\ :literal:`CYL`\ ), or tracks (\ :literal:`TRK`\ ). + + + | **required**: False + | **type**: str + | **default**: M + | **choices**: M, K, REC, CYL, TRK + + + +state + The intended state for the CSD, which the module aims to achieve. + + Specify \ :literal:`absent`\ to remove the CSD entirely, if it already exists. + + Specify \ :literal:`initial`\ to create the CSD if it does not already exist, and initialize it by using DFHCSDUP. + + Specify \ :literal:`warm`\ to retain an existing CSD in its current state. The module verifies whether the specified data set exists and whether it contains any records. If both conditions are met, the module leaves the data set as is. If the data set does not exist or if it is empty, the operation fails. + + Specify \ :literal:`changed`\ to run a DFHCSDUP script to update an existing CSD. + + + | **required**: True + | **type**: str + | **choices**: initial, absent, warm, changed + + + +volumes + The volume(s) where the data set is created. Use a string to define a singular volume or a list of strings for multiple volumes. + + + | **required**: False + | **type**: raw + + + + +Examples +-------- + +.. 
code-block:: yaml+jinja + + + - name: Initialize a CSD by using the templated location + ibm.ibm_zos_cics.csd: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + state: "initial" + + - name: Initialize a user specified CSD + ibm.ibm_zos_cics.csd: + region_data_sets: + dfhcsd: + dsn: "REGIONS.ABCD0001.DFHCSD" + cics_data_sets: + sdfhload: "CICSTS61.CICS.SDFHLOAD" + state: "initial" + + - name: Initialize a large CSD by using the templated location + ibm.ibm_zos_cics.csd: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + space_primary: 10 + space_type: "M" + state: "initial" + + - name: Delete a CSD defined by the template + ibm.ibm_zos_cics.csd: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + state: "absent" + + - name: Delete a user specified CSD + ibm.ibm_zos_cics.csd: + region_data_sets: + dfhcsd: + dsn: "REGIONS.ABCD0001.DFHCSD" + cics_data_sets: + sdfhload: "CICSTS61.CICS.SDFHLOAD" + state: "absent" + + - name: Retain the existing state of a CSD defined by the template + ibm.ibm_zos_cics.csd: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + state: "warm" + + - name: Retain the existing state of a user specified CSD + ibm.ibm_zos_cics.csd: + region_data_sets: + dfhcsd: + dsn: "REGIONS.ABCD0001.DFHCSD" + cics_data_sets: + sdfhload: "CICSTS61.CICS.SDFHLOAD" + state: "warm" + + - name: Run a DFHCSDUP script from a data set + ibm.ibm_zos_cics.csd: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + state: "changed" + input_location: "DATA_SET" + input_src: "TESTER.DEFS.SCRIPT" + + - name: Run a DFHCSDUP script from a USS file + ibm.ibm_zos_cics.csd: + 
region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + input_location: "USS" + input_src: "/u/tester/defs/script.csdup" + + - name: Run a DFHCSDUP script from a local file + ibm.ibm_zos_cics.csd: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + input_location: "LOCAL" + input_src: "/User/tester/defs/script.csdup" + + - name: Run a DFHCSDUP script inline + ibm.ibm_zos_cics.csd: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + input_location: "INLINE" + input_content: | + DEFINE PROGRAM(TESTPRG1) GROUP(TESTGRP1) + DEFINE PROGRAM(TESTPRG2) GROUP(TESTGRP2) + + + + + + + + + +Return Values +------------- + + + + + changed + | True if the state was changed, otherwise False. + + | **returned**: always + | **type**: bool + + + + failed + | True if the Ansible task failed, otherwise False. + + | **returned**: always + | **type**: bool + + + + start_state + | The state of the CSD before the Ansible task runs. + + | **returned**: always + | **type**: dict + + + + data_set_organization + | The organization of the data set at the start of the Ansible task. + + | **returned**: always + | **type**: str + | **sample**: VSAM + + + + + + exists + | True if the CSD exists. + + | **returned**: always + | **type**: bool + + + + + + end_state + | The state of the CSD at the end of the Ansible task. + + | **returned**: always + | **type**: dict + + + + data_set_organization + | The organization of the data set at the end of the Ansible task. + + | **returned**: always + | **type**: str + | **sample**: VSAM + + + + + + exists + | True if the CSD exists. + + | **returned**: always + | **type**: bool + + + + + + executions + | A list of program executions performed during the Ansible task. 
+ + | **returned**: always + | **type**: list + + + + name + | A human-readable name for the program execution. + + | **returned**: always + | **type**: str + + + + rc + | The return code for the program execution. + + | **returned**: always + | **type**: int + + + + stdout + | The standard output stream returned from the program execution. + + | **returned**: always + | **type**: str + + + + stderr + | The standard error stream returned from the program execution. + + | **returned**: always + | **type**: str + + + + + + msg + | A string containing an error message if applicable + + | **returned**: always + | **type**: str + + diff --git a/docs/source/modules/global_catalog.rst b/docs/source/modules/global_catalog.rst new file mode 100644 index 00000000..0c605a17 --- /dev/null +++ b/docs/source/modules/global_catalog.rst @@ -0,0 +1,433 @@ +.. ............................................................................... +.. © Copyright IBM Corporation 2020,2023 . +.. Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) . +.. ............................................................................... + +:github_url: https://github.com/ansible-collections/ibm_zos_cics/blob/main/plugins/modules/global_catalog.py + +.. _global_catalog_module: + + +global_catalog -- Create, remove, and manage the CICS global catalog +==================================================================== + + + +.. contents:: + :local: + :depth: 1 + + +Synopsis +-------- +- Create, remove, and manage the \ `global catalog `__\ data set used by a CICS® region. The global catalog is used to store start type information, location of the CICS system log, installed resource definitions, terminal control information and profiles. It contains information that CICS requires on a restart. +- You can use this module when provisioning or de-provisioning a CICS region, or when managing the state of the global catalog during upgrades or restarts. 
+- Use the \ :literal:`state`\ option to specify the intended state for the global catalog. For example, use \ :literal:`state=initial`\ to create and initialize a global catalog data set if it doesn't exist, or set the autostart override record of an existing global catalog to \ :literal:`AUTOINIT`\ . In either case, a CICS region that is using this global catalog and set with the \ :literal:`START=AUTO`\ system initialization parameter performs an initial start. + + + + + +Parameters +---------- + + + +cics_data_sets + The name of the \ :literal:`SDFHLOAD`\ library of the CICS installation, for example, \ :literal:`CICSTS61.CICS.SDFHLOAD`\ . + + This module uses the \ :literal:`DFHRMUTL`\ utility internally, which is found in the \ :literal:`SDFHLOAD`\ library. + + + | **required**: True + | **type**: dict + + + + sdfhload + The location of the \ :literal:`SDFHLOAD`\ library. If \ :literal:`cics\_data\_sets.template`\ is provided, this value overrides the template. + + + | **required**: False + | **type**: str + + + + template + The templated location of the \ :literal:`SDFHLOAD`\ library. + + + | **required**: False + | **type**: str + + + + +region_data_sets + The location of the region data sets to be created by using a template, for example, \ :literal:`REGIONS.ABCD0001.\<\< data\_set\_name \>\>`\ . + + If you want to use a data set that already exists, ensure that the data set is a global catalog data set. + + + | **required**: True + | **type**: dict + + + + dfhgcd + Overrides the templated location for the global catalog data set. + + + | **required**: False + | **type**: dict + + + + dsn + The data set name of the global catalog to override the template. + + + | **required**: False + | **type**: str + + + + + template + The base location of the region data sets with a template. + + + | **required**: False + | **type**: str + + + + +space_primary + The size of the primary space allocated to the global catalog data set. 
Note that this is just the value; the unit is specified with \ :literal:`space\_type`\ . + + This option takes effect only when the global catalog data set is being created. If the global catalog data set already exists, the option has no effect. + + + | **required**: False + | **type**: int + | **default**: 5 + + + +space_secondary + The size of the secondary space allocated to the global catalog data set. Note that this is just the value; the unit is specified with \ :literal:`space\_type`\ . + + This option takes effect only when the global catalog data set is being created. If the global catalog data set already exists, the option has no effect. + + + | **required**: False + | **type**: int + | **default**: 1 + + + +space_type + The unit portion of the global catalog data set size. Note that this is just the unit; the value for the primary space is specified with \ :literal:`space\_primary`\ and the value for the secondary space is specified with \ :literal:`space\_secondary`\ . + + This option takes effect only when the global catalog data set is being created. If the global catalog data set already exists, the option has no effect. + + The size can be specified in megabytes (\ :literal:`M`\ ), kilobytes (\ :literal:`K`\ ), records (\ :literal:`REC`\ ), cylinders (\ :literal:`CYL`\ ), or tracks (\ :literal:`TRK`\ ). + + + | **required**: False + | **type**: str + | **default**: M + | **choices**: M, K, REC, CYL, TRK + + + +state + The intended state for the global catalog data set, which the module aims to achieve. + + Specify \ :literal:`absent`\ to remove the global catalog data set entirely, if it exists. + + Specify \ :literal:`initial`\ to set the autostart override record to \ :literal:`AUTOINIT`\ . If the specified global catalog data set does not already exist, the module creates the data set. + + Specify \ :literal:`cold`\ to set the autostart override record of an existing global catalog to \ :literal:`AUTOCOLD`\ . 
If the specified global catalog data set does not already exist, the operation fails. + + Specify \ :literal:`warm`\ to set the autostart override record of an existing global catalog to \ :literal:`AUTOASIS`\ , undoing any previous setting of \ :literal:`AUTOINIT`\ or \ :literal:`AUTOCOLD`\ . The module verifies whether the specified data set exists and whether it contains any records. If either condition is not met, the operation fails. + + + | **required**: True + | **type**: str + | **choices**: absent, initial, cold, warm + + + +volumes + The volume(s) where the data set is created. Use a string to define a singular volume or a list of strings for multiple volumes. + + + | **required**: False + | **type**: raw + + + + +Examples +-------- + +.. code-block:: yaml+jinja + + + - name: Initialize a global catalog by using the templated location + ibm.ibm_zos_cics.global_catalog: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + state: "initial" + + - name: Initialize a large global catalog by using the templated location + ibm.ibm_zos_cics.global_catalog: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + space_primary: 100 + space_type: "M" + state: "initial" + + - name: Initialize a large user specified global catalog + ibm.ibm_zos_cics.global_catalog: + region_data_sets: + dfhgcd: + dsn: "REGIONS.ABCD0001.DFHGCD" + cics_data_sets: + sdfhload: "CICSTS61.CICS.SDFHLOAD" + space_primary: 100 + space_type: "M" + state: "initial" + + - name: Set the autostart override record to AUTOASIS for a global catalog defined by the template + ibm.ibm_zos_cics.global_catalog: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + state: "warm" + + - name: Set the autostart override record to AUTOASIS for a user specified global catalog + 
ibm.ibm_zos_cics.global_catalog: + region_data_sets: + dfhgcd: + dsn: "REGIONS.ABCD0001.DFHGCD" + cics_data_sets: + sdfhload: "CICSTS61.CICS.SDFHLOAD" + state: "warm" + + - name: Set the autostart override record to AUTOCOLD for a global catalog defined by the template + ibm.ibm_zos_cics.global_catalog: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + state: "cold" + + - name: Set the autostart override record to AUTOCOLD for a user specified global catalog + ibm.ibm_zos_cics.global_catalog: + region_data_sets: + dfhgcd: + dsn: "REGIONS.ABCD0001.DFHGCD" + cics_data_sets: + sdfhload: "CICSTS61.CICS.SDFHLOAD" + state: "cold" + + - name: Delete a global catalog defined by the template + ibm.ibm_zos_cics.global_catalog: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + state: "absent" + + - name: Delete a user specified global catalog + ibm.ibm_zos_cics.global_catalog: + region_data_sets: + dfhgcd: + dsn: "REGIONS.ABCD0001.DFHGCD" + cics_data_sets: + sdfhload: "CICSTS61.CICS.SDFHLOAD" + state: "absent" + + + + + + +See Also +-------- + +.. seealso:: + + - :ref:`local_catalog_module` + + + +Return Values +------------- + + + + + changed + | True if the state was changed, otherwise False. + + | **returned**: always + | **type**: bool + + + + failed + | True if the Ansible task failed, otherwise False. + + | **returned**: always + | **type**: bool + + + + start_state + | The state of the global catalog before the Ansible task runs. + + | **returned**: always + | **type**: dict + + + + autostart_override + | The current autostart override record. + + | **returned**: always + | **type**: str + + + + next_start + | The next start type listed in the global catalog. + + | **returned**: always + | **type**: str + + + + exists + | True if the specified global catalog data set exists. 
+ + | **returned**: always + | **type**: bool + + + + data_set_organization + | The organization of the data set at the start of the Ansible task. + + | **returned**: always + | **type**: str + | **sample**: VSAM + + + + + + + + end_state + | The state of the global catalog at the end of the Ansible task. + + | **returned**: always + | **type**: dict + + + + autostart_override + | The current autostart override record. + + | **returned**: always + | **type**: str + + + + next_start + | The next start type listed in the global catalog + + | **returned**: always + | **type**: str + + + + exists + | True if the specified global catalog data set exists. + + | **returned**: always + | **type**: bool + + + + data_set_organization + | The organization of the data set at the end of the Ansible task. + + | **returned**: always + | **type**: str + | **sample**: VSAM + + + + + + + + executions + | A list of program executions performed during the Ansible task. + + | **returned**: always + | **type**: list + + + + name + | A human-readable name for the program execution. + + | **returned**: always + | **type**: str + + + + rc + | The return code for the program execution. + + | **returned**: always + | **type**: int + + + + stdout + | The standard output stream returned from the program execution. + + | **returned**: always + | **type**: str + + + + stderr + | The standard error stream returned from the program execution. + + | **returned**: always + | **type**: str + + + + + + msg + | A string containing an error message if applicable + + | **returned**: always + | **type**: str + + diff --git a/docs/source/modules/local_catalog.rst b/docs/source/modules/local_catalog.rst new file mode 100644 index 00000000..73ae81e4 --- /dev/null +++ b/docs/source/modules/local_catalog.rst @@ -0,0 +1,380 @@ +.. ............................................................................... +.. © Copyright IBM Corporation 2020,2023 . +.. 
Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) . +.. ............................................................................... + +:github_url: https://github.com/ansible-collections/ibm_zos_cics/blob/main/plugins/modules/local_catalog.py + +.. _local_catalog_module: + + +local_catalog -- Create, remove, and manage the CICS local catalog +================================================================== + + + +.. contents:: + :local: + :depth: 1 + + +Synopsis +-------- +- Create, remove, and manage the \ `local catalog `__\ data set used by a CICS® region. CICS domains use the local catalog to save some of their information between CICS runs and to preserve this information across a cold start. +- You can use this module when provisioning or de-provisioning a CICS region, or when managing the state of the local catalog during upgrades or restarts. +- Use the \ :literal:`state`\ option to specify the intended state for the local catalog. For example, use \ :literal:`state=initial`\ to create and initialize a local catalog data set if it doesn't exist, or empty an existing local catalog of all records. + + + + + +Parameters +---------- + + + +cics_data_sets + The name of the \ :literal:`SDFHLOAD`\ library of the CICS installation, for example, \ :literal:`CICSTS61.CICS.SDFHLOAD`\ . + + This module uses the \ :literal:`DFHCCUTL`\ utility internally, which is found in the \ :literal:`SDFHLOAD`\ library. + + + | **required**: True + | **type**: dict + + + + sdfhload + The location of the \ :literal:`SDFHLOAD`\ library. If \ :literal:`cics\_data\_sets.template`\ is provided, this value overrides the template. + + + | **required**: False + | **type**: str + + + + template + The templated location of the \ :literal:`SDFHLOAD`\ library. 
+ + + | **required**: False + | **type**: str + + + + +region_data_sets + The location of the region data sets to be created by using a template, for example, \ :literal:`REGIONS.ABCD0001.\<\< data\_set\_name \>\>`\ . + + If you want to use a data set that already exists, ensure that the data set is a local catalog data set. + + + | **required**: True + | **type**: dict + + + + dfhlcd + Overrides the templated location for the local catalog data set. + + + | **required**: False + | **type**: dict + + + + dsn + The data set name of the local catalog to override the template. + + + | **required**: False + | **type**: str + + + + + template + The base location of the region data sets with a template. + + + | **required**: False + | **type**: str + + + + +space_primary + The size of the primary space allocated to the local catalog data set. Note that this is just the value; the unit is specified with \ :literal:`space\_type`\ . + + This option takes effect only when the local catalog data set is being created. If the local catalog data set already exists, the option has no effect. + + + | **required**: False + | **type**: int + | **default**: 200 + + + +space_secondary + The size of the secondary space allocated to the local catalog data set. Note that this is just the value; the unit is specified with \ :literal:`space\_type`\ . + + This option takes effect only when the local catalog data set is being created. If the local catalog data set already exists, the option has no effect. + + + | **required**: False + | **type**: int + | **default**: 5 + + + +space_type + The unit portion of the local catalog data set size. Note that this is just the unit; the value for the primary space is specified with \ :literal:`space\_primary`\ and the value for the secondary space is specified with \ :literal:`space\_secondary`\ . + + This option takes effect only when the local catalog data set is being created. If the local catalog data set already exists, the option has no effect. 
+ + The size can be specified in megabytes (\ :literal:`M`\ ), kilobytes (\ :literal:`K`\ ), records (\ :literal:`REC`\ ), cylinders (\ :literal:`CYL`\ ), or tracks (\ :literal:`TRK`\ ). + + + | **required**: False + | **type**: str + | **default**: REC + | **choices**: M, K, REC, CYL, TRK + + + +state + The intended state for the local catalog, which the module aims to achieve. + + Specify \ :literal:`absent`\ to remove the local catalog data set entirely, if it already exists. + + Specify \ :literal:`initial`\ to create the local catalog data set if it does not exist, or empty this existing local catalog of all records. + + Specify \ :literal:`warm`\ to retain an existing local catalog in its current state. The module verifies whether the specified data set exists and whether it contains any records. If both conditions are met, the module leaves the data set as is. If the data set does not exist or if it is empty, the operation fails. + + + | **required**: True + | **type**: str + | **choices**: initial, absent, warm + + + +volumes + The volume(s) where the data set is created. Use a string to define a singular volume or a list of strings for multiple volumes. + + + | **required**: False + | **type**: raw + + + + +Examples +-------- + +.. 
code-block:: yaml+jinja + + + - name: Initialize a local catalog data set by using the templated location + ibm.ibm_zos_cics.local_catalog: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + state: "initial" + + - name: Initialize a user specified local catalog data set + ibm.ibm_zos_cics.local_catalog: + region_data_sets: + dfhlcd: + dsn: "REGIONS.ABCD0001.DFHLCD" + cics_data_sets: + sdfhload: "CICSTS61.CICS.SDFHLOAD" + state: "initial" + + - name: Initialize a large catalog data set by using the templated location + ibm.ibm_zos_cics.local_catalog: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + space_primary: 500 + space_type: "REC" + state: "initial" + + - name: Retain the existing local catalog defined by the template + ibm.ibm_zos_cics.local_catalog: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + state: "warm" + + - name: Retain a user specified local catalog in its current state + ibm.ibm_zos_cics.local_catalog: + region_data_sets: + dfhlcd: + dsn: "REGIONS.ABCD0001.DFHLCD" + cics_data_sets: + sdfhload: "CICSTS61.CICS.SDFHLOAD" + state: "warm" + + - name: Delete a local catalog data set defined by the template + ibm.ibm_zos_cics.local_catalog: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + state: "absent" + + - name: Delete a user specified local catalog data set + ibm.ibm_zos_cics.local_catalog: + region_data_sets: + dfhlcd: + dsn: "REGIONS.ABCD0001.DFHLCD" + cics_data_sets: + sdfhload: "CICSTS61.CICS.SDFHLOAD" + state: "absent" + + + + + + +See Also +-------- + +.. seealso:: + + - :ref:`global_catalog_module` + + + +Return Values +------------- + + + + + changed + | True if the state was changed, otherwise False. 
+ + | **returned**: always + | **type**: bool + + + + failed + | True if the Ansible task failed, otherwise False. + + | **returned**: always + | **type**: bool + + + + start_state + | The state of the local catalog data set before the Ansible task runs. + + | **returned**: always + | **type**: dict + + + + data_set_organization + | The organization of the data set at the start of the Ansible task. + + | **returned**: always + | **type**: str + | **sample**: VSAM + + + + + + exists + | True if the specified local catalog data set exists. + + | **returned**: always + | **type**: bool + + + + + + end_state + | The state of the local catalog data set at the end of the Ansible task. + + | **returned**: always + | **type**: dict + + + + data_set_organization + | The organization of the data set at the end of the Ansible task. + + | **returned**: always + | **type**: str + | **sample**: VSAM + + + + + + exists + | True if the specified local catalog data set exists. + + | **returned**: always + | **type**: bool + + + + + + executions + | A list of program executions performed during the Ansible task. + + | **returned**: always + | **type**: list + + + + name + | A human-readable name for the program execution. + + | **returned**: always + | **type**: str + + + + rc + | The return code for the program execution. + + | **returned**: always + | **type**: int + + + + stdout + | The standard output stream returned from the program execution. + + | **returned**: always + | **type**: str + + + + stderr + | The standard error stream returned from the program execution. + + | **returned**: always + | **type**: str + + + + + + msg + | A string containing an error message if applicable + + | **returned**: always + | **type**: str + + diff --git a/docs/source/modules/local_request_queue.rst b/docs/source/modules/local_request_queue.rst new file mode 100644 index 00000000..34bf6b77 --- /dev/null +++ b/docs/source/modules/local_request_queue.rst @@ -0,0 +1,330 @@ +.. 
............................................................................... +.. © Copyright IBM Corporation 2020,2023 . +.. Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) . +.. ............................................................................... + +:github_url: https://github.com/ansible-collections/ibm_zos_cics/blob/main/plugins/modules/local_request_queue.py + +.. _local_request_queue_module: + + +local_request_queue -- Create and remove the CICS local request queue +===================================================================== + + + +.. contents:: + :local: + :depth: 1 + + +Synopsis +-------- +- Create and remove the \ `local request queue `__\ data set used by a CICS® region. The local request queue data set stores pending BTS requests. It ensures that, if CICS fails, no pending requests are lost. +- You can use this module when provisioning or de-provisioning a CICS region. +- Use the \ :literal:`state`\ option to specify the intended state for the local request queue. For example, use \ :literal:`state=initial`\ to create a local request queue data set if it doesn't yet exist, or empty an existing local request queue of all records. + + + + + +Parameters +---------- + + + +region_data_sets + The location of the region data sets to be created by using a template, for example, \ :literal:`REGIONS.ABCD0001.\<\< data\_set\_name \>\>`\ . + + If you want to use a data set that already exists, ensure that the data set is a local request queue data set. + + + | **required**: True + | **type**: dict + + + + dfhlrq + Overrides the templated location for the local request queue data set. + + + | **required**: False + | **type**: dict + + + + dsn + The data set name of the local request queue to override the template. + + + | **required**: False + | **type**: str + + + + + template + The base location of the region data sets with a template. 
+ + + | **required**: False + | **type**: str + + + + +space_primary + The size of the primary space allocated to the local request queue data set. Note that this is just the value; the unit is specified with \ :literal:`space\_type`\ . + + This option takes effect when the local request queue data set is being created. If the data set already exists, the option has no effect. + + + | **required**: False + | **type**: int + | **default**: 4 + + + +space_secondary + The size of the secondary space allocated to the local request queue data set. Note that this is just the value; the unit is specified with \ :literal:`space\_type`\ . + + This option takes effect when the local request queue data set is being created. If the data set already exists, the option has no effect. + + + | **required**: False + | **type**: int + | **default**: 1 + + + +space_type + The unit portion of the local request queue data set size. Note that this is just the unit; the value for the primary space is specified with \ :literal:`space\_primary`\ and the value for the secondary space is specified with \ :literal:`space\_secondary`\ . + + This option takes effect only when the local request queue data set is being created. If the data set already exists, the option has no effect. + + The size can be specified in megabytes (\ :literal:`M`\ ), kilobytes (\ :literal:`K`\ ), records (\ :literal:`REC`\ ), cylinders (\ :literal:`CYL`\ ), or tracks (\ :literal:`TRK`\ ). + + + | **required**: False + | **type**: str + | **default**: M + | **choices**: M, K, REC, CYL, TRK + + + +state + The intended state for the local request queue, which the module aims to achieve. + + Specify \ :literal:`absent`\ to remove the local request queue data set entirely, if it exists. + + Specify \ :literal:`initial`\ to create the local request queue data set if it does not exist, or empty this existing local request queue of all records. 
+ + Specify \ :literal:`warm`\ to retain an existing local request queue data set in its current state. The module checks whether the specified data set exists, and if it does, leaves the data set as is. If the data set does not exist, the operation fails. + + + | **required**: True + | **type**: str + | **choices**: initial, absent, warm + + + +volumes + The volume(s) where the data set is created. Use a string to define a singular volume or a list of strings for multiple volumes. + + + | **required**: False + | **type**: raw + + + + +Examples +-------- + +.. code-block:: yaml+jinja + + + - name: Initialize a local request queue data set by using the templated location + ibm.ibm_zos_cics.local_request_queue: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: "initial" + + - name: Initialize a user specified local request queue data set + ibm.ibm_zos_cics.local_request_queue: + region_data_sets: + dfhlrq: + dsn: "REGIONS.ABCD0001.DFHLRQ" + state: "initial" + + - name: Initialize a large request queue data set by using the templated location + ibm.ibm_zos_cics.local_request_queue: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + space_primary: 50 + space_type: "M" + state: "initial" + + - name: Retain the existing state of a local request queue data set defined by the template + ibm.ibm_zos_cics.local_request_queue: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: "warm" + + - name: Retain the existing state of a user specified local request queue data set + ibm.ibm_zos_cics.local_request_queue: + region_data_sets: + dfhlrq: + dsn: "REGIONS.ABCD0001.DFHLRQ" + state: "warm" + + - name: Delete a local request queue data set defined by the template + ibm.ibm_zos_cics.local_request_queue: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: "absent" + + - name: Delete a user specified local request queue data set + ibm.ibm_zos_cics.local_request_queue: + 
region_data_sets: + dfhlrq: + dsn: "REGIONS.ABCD0001.DFHLRQ" + state: "absent" + + + + + + + + + +Return Values +------------- + + + + + changed + | True if the state was changed, otherwise False. + + | **returned**: always + | **type**: bool + + + + failed + | True if the Ansible task failed, otherwise False. + + | **returned**: always + | **type**: bool + + + + start_state + | The state of the local request queue data set before the Ansible task runs. + + | **returned**: always + | **type**: dict + + + + data_set_organization + | The organization of the data set at the start of the Ansible task. + + | **returned**: always + | **type**: str + | **sample**: VSAM + + + + + + exists + | True if the specified local request queue data set exists. + + | **returned**: always + | **type**: bool + + + + + + end_state + | The state of the local request queue data set at the end of the Ansible task. + + | **returned**: always + | **type**: dict + + + + data_set_organization + | The organization of the data set at the end of the Ansible task. + + | **returned**: always + | **type**: str + | **sample**: VSAM + + + + + + exists + | True if the specified local request queue data set exists. + + | **returned**: always + | **type**: bool + + + + + + executions + | A list of program executions performed during the Ansible task. + + | **returned**: always + | **type**: list + + + + name + | A human-readable name for the program execution. + + | **returned**: always + | **type**: str + + + + rc + | The return code for the program execution. + + | **returned**: always + | **type**: int + + + + stdout + | The standard output stream returned from the program execution. + + | **returned**: always + | **type**: str + + + + stderr + | The standard error stream returned from the program execution. 
+ + | **returned**: always + | **type**: str + + + + + + msg + | A string containing an error message if applicable + + | **returned**: always + | **type**: str + + diff --git a/docs/source/modules/region_jcl.rst b/docs/source/modules/region_jcl.rst new file mode 100644 index 00000000..8e5728b9 --- /dev/null +++ b/docs/source/modules/region_jcl.rst @@ -0,0 +1,3790 @@ +.. ............................................................................... +.. © Copyright IBM Corporation 2020,2023 . +.. Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) . +.. ............................................................................... + +:github_url: https://github.com/ansible-collections/ibm_zos_cics/blob/main/plugins/modules/region_jcl.py + +.. _region_jcl_module: + + +region_jcl -- Create CICS startup JCL data set +============================================== + + + +.. contents:: + :local: + :depth: 1 + + +Synopsis +-------- +- Create a data set containing the JCL to start a CICS® region. +- The JCL is generated by your input of CICS system data sets and system initialization parameters for CICS startup using the \ :literal:`DFHSIP`\ program. + + + + + +Parameters +---------- + + + +applid + The name of your z/OS Communications Server application identifier for this CICS region. + + + | **required**: True + | **type**: str + + + +cics_data_sets + The data set names of the \ :literal:`SDFHAUTH`\ , \ :literal:`SDFHLOAD`\ and \ :literal:`SDFHLIC`\ libraries, for example, \ :literal:`CICSTS61.CICS.SDFHAUTH`\ and \ :literal:`CICSTS61.CICS.SDFHLOAD`\ . + + + | **required**: True + | **type**: dict + + + + sdfhauth + The location of the \ :literal:`SDFHAUTH`\ library to override the template. + + + | **required**: False + | **type**: str + + + + sdfhlic + The location of the \ :literal:`SDFHLIC`\ library. If \ :literal:`cics\_data\_sets.template`\ is provided, this value overrides the template. 
+ + + | **required**: False + | **type**: str + + + + sdfhload + The location of the \ :literal:`SDFHLOAD`\ library. If \ :literal:`cics\_data\_sets.template`\ is provided, this value overrides the template. + + + | **required**: False + | **type**: str + + + + template + The templated location of the libraries. + + + | **required**: False + | **type**: str + + + + +cpsm_data_sets + The data set names of the \ :literal:`SEYUAUTH`\ and \ :literal:`SEYULOAD`\ libraries, for example, \ :literal:`CTS610.CPSM610.SEYUAUTH`\ . + + + | **required**: False + | **type**: dict + + + + seyuauth + The location of the \ :literal:`SEYUAUTH`\ library. If \ :literal:`cpsm\_data\_sets.template`\ is provided, this value overrides the template. + + + | **required**: False + | **type**: str + + + + seyuload + The location of the \ :literal:`SEYULOAD`\ library. If \ :literal:`cpsm\_data\_sets.template`\ is provided, this value overrides the template. + + + | **required**: False + | **type**: str + + + + template + The templated location of the CICSPlex SM libraries. + + + | **required**: False + | **type**: str + + + + +dfhrpl + Any locations of additional data sets other than \ :literal:`SDFHLOAD`\ , \ :literal:`SCEECICS`\ , \ :literal:`SCEERUN`\ , or \ :literal:`SCEERUN2`\ , to be added to the DFHRPL concatenation. The DFHRPL concatenation is where you specify the libraries that contain modules loaded by CICS, for example, the libraries containing your CICS application programs, your CICS control tables, and so on. You can either add data sets at the very top of the list or append them to the bottom of the list. There are other data sets in between, as determined by the defaults or other input parameters; for example, \ :literal:`SCEERUN`\ and \ :literal:`SCEERUN2`\ as specified with \ :literal:`le\_data\_sets`\ , \ :literal:`SDFHLOAD`\ as specified with \ :literal:`cics\_data\_sets`\ , and so on. 
+ + + | **required**: False + | **type**: dict + + + + data_sets + The \ :literal:`DFHRPL`\ data sets to be added to the bottom of the list. + + + | **required**: False + | **type**: list + + + + top_data_sets + The \ :literal:`DFHRPL`\ data sets to be added to the very top of the list. + + + | **required**: False + | **type**: list + + + + +job_parameters + Specifies various parameters to be applied to the CICS startup job. + + + | **required**: False + | **type**: dict + + + + accounting_information + Allows jobs to be grouped into a class. + + + | **required**: False + | **type**: dict + + + + cards + Specifies the estimated number of cards JES2 is to punch from this job's sysout data sets. The value is 1 through 4 decimal numbers. If you omit cards, JES2 uses an installation default specified at initialization. + + + | **required**: False + | **type**: int + + + + copies + Specifies the number of times JES2 is to print or punch this job's sysout data sets. The value is 1 through 3 decimal numbers and must not exceed an installation-specified limit. The maximum is 255. For example, code 2 for two copies. If you omit copies, JES2 assumes one copy. + + + | **required**: False + | **type**: int + + + + forms + Specifies the forms that JES2 is to use for printing this job's sysout data sets. The value is 1 through 4 alphanumeric characters. For example, code 5 for 5-part forms. If you omit forms, JES2 uses an installation default specified at initialization. + + + | **required**: False + | **type**: str + + + + linect + Specifies the number of lines JES2 is to print per page for this job's sysout data sets. The value is 1 through 3 decimal numbers. If you omit linect, JES2 uses an installation default specified at initialization. If you code a zero, JES2 does not eject to a new page when the number of lines exceeds the installation default. 
+ + + | **required**: False + | **type**: int + + + + lines + Specifies the estimated line count, in thousands of lines, from this job's sysout data sets. The value is 1 through 4 decimal numbers. For example, code 5 for 5000 lines. If you omit lines, JES2 uses an installation default specified at initialization. + + + | **required**: False + | **type**: int + + + + log + Specifies whether JES2 is to print the job log. Code N to suppress printing of the job log. If you code any other character or omit this subparameter, JES2 prints the job log. If your installation specified NOLOG for this job's class during JES2 initialization, JES2 does not print the job log. + + + | **required**: False + | **type**: str + + + + pano + Specifies the programmer's accounting number. The value is 1 through 4 alphanumeric characters. + + + | **required**: False + | **type**: str + + + + room + Specifies the programmer's room number. The value is 1 through 4 alphanumeric characters. + + + | **required**: False + | **type**: str + + + + time + Specifies the estimated execution time in minutes. The value is 1 through 4 decimal numbers. For example, code 30 for 30 minutes. If you omit a time subparameter and a TIME parameter on the JES2 /\*JOBPARM statement, JES2 uses an installation default specified at initialization. If job execution exceeds the time, JES2 sends a message to the operator. + + + | **required**: False + | **type**: int + + + + + class + Allows jobs to be grouped into a class. + + + | **required**: False + | **type**: str + + + + job_name + The name of the CICS startup job. The default value is \ :literal:`APPLID`\ . + + + | **required**: False + | **type**: str + + + + memlimit + Use the MEMLIMIT parameter to specify the limit on the total size of usable 64-bit z/OS storage in a single address space. + + + | **required**: False + | **type**: str + + + + msgclass + Use the MSGCLASS parameter to assign the job log to an output class. 
The job log is a record of job-related information for the programmer. + + + | **required**: False + | **type**: str + + + + msglevel + Use the MSGLEVEL parameter to control the listing of the JCL output for the job. + + + | **required**: False + | **type**: dict + + + + messages + Indicates which messages the system is to print in the system messages portion of the JCL output. + + + | **required**: False + | **type**: int + | **choices**: 0, 1 + + + + statements + Indicates which job control statements the system is to print in the statement images portion of the JCL output. + + + | **required**: False + | **type**: int + | **choices**: 0, 1, 2 + + + + + programmer_name + Use the programmer's name parameter to identify the person or group responsible for a job. + + + | **required**: False + | **type**: str + + + + region + Use the REGION parameter to specify the amount of central or virtual storage that the job requires. The system applies the value that you code on REGION to each step of the job. + + + | **required**: False + | **type**: str + + + + user + Code the USER parameter to identify to the system the person submitting the job. The user ID is used by RACF®, the system resources manager (SRM), and other system components. + + + | **required**: False + | **type**: str + + + + +le_data_sets + The data set names of the \ :literal:`SCEECICS`\ , \ :literal:`SCEERUN`\ and \ :literal:`SCEERUN2`\ libraries. + + + | **required**: True + | **type**: dict + + + + sceecics + The location of the \ :literal:`SCEECICS`\ library. If \ :literal:`le\_data\_sets.template`\ is provided, this value overrides the template. + + + | **required**: False + | **type**: str + + + + sceerun + The location of the \ :literal:`SCEERUN`\ library. If \ :literal:`le\_data\_sets.template`\ is provided, this value overrides the template. + + + | **required**: False + | **type**: str + + + + sceerun2 + The location of the \ :literal:`SCEERUN2`\ library. 
If \ :literal:`le\_data\_sets.template`\ is provided, this value overrides the template. + + + | **required**: False + | **type**: str + + + + template + The templated location of the Language Environment runtime libraries. + + + | **required**: False + | **type**: str + + + + +output_data_sets + The system output data sets such as \ :literal:`CEEMSG`\ and \ :literal:`SYSPRINT`\ , as well as the destination class of the output. + + + | **required**: False + | **type**: dict + + + + ceemsg + Overrides the default class to use a custom class for the \ :literal:`CEEMSG`\ data set. Alternatively, omit the \ :literal:`CEEMSG`\ data set from being added to the job. + + + | **required**: False + | **type**: dict + + + + omit + Specifies whether \ :literal:`CEEMSG`\ should be excluded from being added to the list of sysout data sets. + + + | **required**: False + | **type**: bool + + + + sysout + Specify the output class to assign the \ :literal:`CEEMSG`\ data set to. + + + | **required**: False + | **type**: str + + + + + ceeout + Overrides the default class to use a custom class for the \ :literal:`CEEOUT`\ data set. Alternatively, omit the \ :literal:`CEEOUT`\ data set from being added to the job. + + + | **required**: False + | **type**: dict + + + + omit + Specifies whether \ :literal:`CEEOUT`\ should be excluded from being added to the list of sysout data sets. + + + | **required**: False + | **type**: bool + + + + sysout + Specify the output class to assign the \ :literal:`CEEOUT`\ data set to. + + + | **required**: False + | **type**: str + + + + + default_sysout_class + The class to be applied as the default for all of the output data sets. If it isn't provided and if no overrides are specified for an individual output data set, \* is applied. + + + | **required**: False + | **type**: str + + + + dfhcxrf + Overrides the default class to use a custom class for the \ :literal:`DFHCXRF`\ data set. 
Alternatively, omit the \ :literal:`DFHCXRF`\ data set from being added to the job. + + + | **required**: False + | **type**: dict + + + + omit + Specifies whether \ :literal:`DFHCXRF`\ should be excluded from being added to the list of sysout data sets. + + + | **required**: False + | **type**: bool + + + + sysout + Specify the output class to assign the \ :literal:`DFHCXRF`\ data set to. + + + | **required**: False + | **type**: str + + + + + logusr + Overrides the default class to use a custom class for the \ :literal:`LOGUSR`\ data set. Alternatively, omit the \ :literal:`LOGUSR`\ data set from being added to the job. + + + | **required**: False + | **type**: dict + + + + omit + Specifies whether \ :literal:`LOGUSR`\ should be excluded from being added to the list of sysout data sets. + + + | **required**: False + | **type**: bool + + + + sysout + Specify the output class to assign the \ :literal:`LOGUSR`\ data set to. + + + | **required**: False + | **type**: str + + + + + msgusr + Overrides the default class to use a custom class for the \ :literal:`MSGUSR`\ data set. Alternatively, omit the \ :literal:`MSGUSR`\ data set from being added to the job. + + + | **required**: False + | **type**: dict + + + + omit + Specifies whether \ :literal:`MSGUSR`\ should be excluded from being added to the list of sysout data sets. + + + | **required**: False + | **type**: bool + + + + sysout + Specify the output class to assign the \ :literal:`MSGUSR`\ data set to. + + + | **required**: False + | **type**: str + + + + + sysabend + Overrides the default class to use a custom class for the \ :literal:`SYSABEND`\ data set. Alternatively, omit the \ :literal:`SYSABEND`\ data set from being added to the job. + + + | **required**: False + | **type**: dict + + + + omit + Specifies whether \ :literal:`SYSABEND`\ should be excluded from being added to the list of sysout data sets. 
+ + + | **required**: False + | **type**: bool + + + + sysout + Specify the output class to assign the \ :literal:`SYSABEND`\ data set to. + + + | **required**: False + | **type**: str + + + + + sysout + Overrides the default class to use a custom class for the \ :literal:`SYSOUT`\ data set. Alternatively, omit the \ :literal:`SYSOUT`\ data set from being added to the job. + + + | **required**: False + | **type**: dict + + + + omit + Specifies whether \ :literal:`SYSOUT`\ should be excluded from being added to the list of sysout data sets. + + + | **required**: False + | **type**: bool + + + + sysout + Specify the output class to assign the \ :literal:`SYSOUT`\ data set to. + + + | **required**: False + | **type**: str + + + + + sysprint + Overrides the default class to use a custom class for the \ :literal:`SYSPRINT`\ data set. Alternatively, omit the \ :literal:`SYSPRINT`\ data set from being added to the job. + + + | **required**: False + | **type**: dict + + + + omit + Specifies whether \ :literal:`SYSPRINT`\ should be excluded from being added to the list of sysout data sets. + + + | **required**: False + | **type**: bool + + + + sysout + Specify the output class to assign the \ :literal:`SYSPRINT`\ data set to. + + + | **required**: False + | **type**: str + + + + + sysudump + Overrides the default class to use a custom class for the \ :literal:`SYSUDUMP`\ data set. Alternatively, omit the \ :literal:`SYSUDUMP`\ data set from being added to the job. + + + | **required**: False + | **type**: dict + + + + omit + Specifies whether \ :literal:`SYSUDUMP`\ should be excluded from being added to the list of sysout data sets. + + + | **required**: False + | **type**: bool + + + + sysout + Specify the output class to assign the \ :literal:`SYSUDUMP`\ data set to. 
+ + + | **required**: False + | **type**: str + + + + + +region_data_sets + The location of the region data sets, for example, \ :literal:`REGIONS.ABCD01.DFHAUXT`\ , \ :literal:`REGIONS.ABCD01.DFHCSD`\ and \ :literal:`REGIONS.ABCD01.DFHGCD`\ . + + + | **required**: True + | **type**: dict + + + + dfhauxt + Overrides the templated location for the auxiliary trace A data set. + + + | **required**: False + | **type**: dict + + + + dsn + The name of the auxiliary trace A data set to override the template. + + + | **required**: False + | **type**: str + + + + + dfhbuxt + Overrides the templated location for the auxiliary trace B data set. + + + | **required**: False + | **type**: dict + + + + dsn + The name of the auxiliary trace B data set to override the template. + + + | **required**: False + | **type**: str + + + + + dfhcsd + Overrides the templated location for the CSD. + + + | **required**: False + | **type**: dict + + + + dsn + The name of the CSD to override the template. + + + | **required**: False + | **type**: str + + + + + dfhdmpa + Overrides the templated location for the dump A data set. + + + | **required**: False + | **type**: dict + + + + dsn + The name of the dump A data set to override the template. + + + | **required**: False + | **type**: str + + + + + dfhdmpb + Overrides the templated location for the dump B data set. + + + | **required**: False + | **type**: dict + + + + dsn + The name of the dump B data set to override the template. + + + | **required**: False + | **type**: str + + + + + dfhgcd + Overrides the templated location for the global catalog data set. + + + | **required**: False + | **type**: dict + + + + dsn + The data set name of the global catalog to override the template. + + + | **required**: False + | **type**: str + + + + + dfhintra + Overrides the templated location for the intrapartition data set. + + + | **required**: False + | **type**: dict + + + + dsn + The name of the intrapartition data set to override the template. 
+ + + | **required**: False + | **type**: str + + + + + dfhlcd + Overrides the templated location for the local catalog data set. + + + | **required**: False + | **type**: dict + + + + dsn + The data set name of the local catalog to override the template. + + + | **required**: False + | **type**: str + + + + + dfhlrq + Overrides the templated location for the local request queue data set. + + + | **required**: False + | **type**: dict + + + + dsn + The data set name of the local request queue to override the template. + + + | **required**: False + | **type**: str + + + + + dfhstart + Overrides the templated location for the CICS startup JCL data set. + + + | **required**: False + | **type**: dict + + + + dsn + The data set name of the CICS startup JCL data set to override the template. + + The data set name can also be set to a member of an existing PDS or PDSE. + + + | **required**: False + | **type**: str + + + + + dfhtemp + Overrides the templated location for the temporary storage data set. + + + | **required**: False + | **type**: dict + + + + dsn + The data set name of the temporary storage to override the template. + + + | **required**: False + | **type**: str + + + + + template + The base location of the region data sets to be created by using a template, for example, \ :literal:`REGIONS.ABCD0001.\<\< data\_set\_name \>\>`\ . This is not required if you provide the data set name (dsn) of all the data sets individually. + + + | **required**: False + | **type**: str + + + + +sit_parameters + Define the system initalization parameters for the CICS region. + + + | **required**: False + | **type**: dict + + + + adi + The ADI parameter specifies the alternate delay interval in seconds for an alternate CICS® region when you are running CICS with XRF. 
+ + + | **required**: False + | **type**: int + + + + aibridge + The AIBRIDGE parameter specifies whether the autoinstall user replaceable module (URM) is to be called when creating bridge facilities (virtual terminals) used by the 3270 bridge mechanism. + + Specify this parameter only in the bridge router region. + + + | **required**: False + | **type**: str + | **choices**: AUTO, YES + + + + aicons + The AICONS parameter specifies whether you want autoinstall support for consoles. + + + | **required**: False + | **type**: str + | **choices**: NO, AUTO, YES + + + + aiexit + The AIEXIT parameter specifies the name of the autoinstall user-replaceable program that you want CICS® to use when autoinstalling local z/OS® Communications Server terminals, APPC connections, virtual terminals, and shipped terminals and connections. + + + | **required**: False + | **type**: str + + + + aildelay + The AILDELAY parameter specifies the delay period that elapses after all sessions between CICS® and an autoinstalled terminal, APPC device, or APPC system are ended, before the terminal or connection entry is deleted. + + + | **required**: False + | **type**: int + + + + aiqmax + The AIQMAX parameter specifies the maximum number of z/OS® Communications Server terminals and APPC connections that can be queued concurrently for autoinstall, the limit is the sum of installs and deletes. + + + | **required**: False + | **type**: int + + + + airdelay + The AIRDELAY parameter specifies the delay period that elapses after an emergency restart before autoinstalled terminal and APPC connection entries that are not in session are deleted. + + + | **required**: False + | **type**: int + + + + akpfreq + The AKPFREQ parameter specifies the number of write requests to the CICS® system log stream output buffer required before CICS writes an activity keypoint. 
+ + + | **required**: False + | **type**: int + + + + autconn + The AUTCONN parameter specifies that the reconnection of terminals after an XRF takeover is to be delayed, to allow time for manual switching. + + + | **required**: False + | **type**: int + + + + autodst + The AUTODST parameter specifies whether CICS is to activate automatic dynamic storage tuning for application programs. + + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + autoresettime + The AUTORESETTIME parameter specifies the action CICS takes for automatic time changes. + + + | **required**: False + | **type**: str + | **choices**: IMMEDIATE, NO, YES + + + + auxtr + The AUXTR parameter specifies whether the auxiliary trace destination is to be activated at system initialization. + + + | **required**: False + | **type**: str + | **choices**: OFF, ON + + + + auxtrsw + The AUXTRSW parameter specifies whether you want the auxiliary trace autoswitch facility. + + + | **required**: False + | **type**: str + | **choices**: NO, NEXT, ALL + + + + bms + The BMS system initialization parameter specifies which version of basic mapping support you require in CICS. + + + | **required**: False + | **type**: str + + + + brmaxkeeptime + The BRMAXKEEPTIME parameter specifies the maximum time (in seconds) that bridge facilities (virtual terminals used by the 3270 bridge) are kept if they are not used. + + + | **required**: False + | **type**: int + + + + cdsasze + The CDSASZE system initialization parameter specifies the size of the CDSA. + + + | **required**: False + | **type**: int + + + + certexpirywarn + The CERTEXPIRYWARN parameter specifies whether CICS® warns about expiring certificates, and if so, how many days ahead of the expiry. + + + | **required**: False + | **type**: str + + + + chkstrm + The CHKSTRM parameter specifies that terminal storage-violation checking is to be activated or deactivated. 
+ + + | **required**: False + | **type**: str + | **choices**: CURRENT, NONE + + + + chkstsk + The CHKSTSK parameter specifies that task storage-violation checking at startup is to be activated or deactivated. + + + | **required**: False + | **type**: str + | **choices**: CURRENT, NONE + + + + cicssvc + The CICSSVC parameter specifies the number that you have assigned to the CICS type 3 SVC. + + + | **required**: False + | **type**: int + + + + cilock + The CILOCK parameter specifies whether or not the control interval lock of a non-RLS VSAM file is to be kept after a successful read-for-update request. + + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + clintcp + The CLINTCP parameter specifies the default client code page to be used by the DFHCNV data conversion table, but only if the CLINTCP parameter in the DFHCNV macro is set to SYSDEF. + + + | **required**: False + | **type**: str + + + + clsdstp + The CLSDSTP system initialization parameter specifies the notification required for an EXEC CICS ISSUE PASS command. + + + | **required**: False + | **type**: str + | **choices**: NOTIFY, NONOTIFY + + + + clt + The CLT parameter specifies the suffix for the command list table (CLT), if this SIT is used by an alternate XRF system. + + + | **required**: False + | **type**: str + + + + cmdprot + The CMDPROT parameter specifies whether to allow or inhibit CICS validation of start addresses of storage referenced as output parameters on EXEC CICS commands. + + + | **required**: False + | **type**: str + | **choices**: YES, NO + + + + cmdsec + The CMDSEC parameter specifies whether or not you want CICS to honor the CMDSEC option specified on a transaction's resource definition. + + + | **required**: False + | **type**: str + | **choices**: ASIS, ALWAYS + + + + confdata + The CONFDATA parameter specifies whether CICS is to redact sensitive data that might otherwise appear in CICS trace entries or in dumps. 
+ + + | **required**: False + | **type**: str + | **choices**: HIDE, SHOW + + + + conftxt + The CONFTXT system initialization parameter specifies whether CICS is to prevent z/OS Communications Server from tracing user data. + + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + cpsmconn + The CPSMCONN parameter specifies whether you want CICS to invoke the specified component during initialization of the region. + + + | **required**: False + | **type**: str + | **choices**: NO, CMAS, LMAS, SMSSJ, WUI + + + + crlprofile + The CRLPROFILE parameter specifies the name of the profile that is used to authorize CICS to access the certification revocation lists (CRLs) that are stored in an LDAP server. + + + | **required**: False + | **type**: str + + + + csdacc + The CSDACC parameter specifies the type of access to the CSD to be permitted to this CICS region. + + + | **required**: False + | **type**: str + | **choices**: READWRITE, READONLY + + + + csdbkup + The CSDBKUP parameter specifies whether or not the CSD is eligible for BWO. + + + | **required**: False + | **type**: str + | **choices**: STATIC, DYNAMIC + + + + csdbufnd + The CSDBUFND parameter specifies the number of buffers to be used for CSD data. + + + | **required**: False + | **type**: int + + + + csdbufni + The CSDBUFNI parameter specifies the number of buffers to be used for the CSD index. + + + | **required**: False + | **type**: int + + + + csddisp + The CSDDISP parameter specifies the disposition of the data set to be allocated to the CSD. + + + | **required**: False + | **type**: str + | **choices**: OLD, SHR + + + + csddsn + The CSDDSN parameter specifies the 1-44 character JCL data set name (DSNAME) to be used for the CSD. + + + | **required**: False + | **type**: str + + + + csdfrlog + The CSDFRLOG parameter specifies a number that corresponds to the journal name that CICS uses to identify the forward recovery log stream for the CSD. 
+ + + | **required**: False + | **type**: int + + + + csdinteg + The CSDINTEG parameter specifies the level of read integrity for the CSD if it is accessed in RLS mode. + + + | **required**: False + | **type**: str + | **choices**: UNCOMMITTED, CONSISTENT, REPEATABLE + + + + csdjid + The CSDJID parameter specifies the journal identifier of the journal that you want CICS to use for automatic journaling of file requests against the CSD. + + + | **required**: False + | **type**: str + + + + csdlsrno + The CSDLSRNO system initialization parameter specifies whether the CSD is to be associated with a local shared resource (LSR) pool. + + + | **required**: False + | **type**: str + + + + csdrecov + The CSDRECOVsystem initialization parameter specifies whether the CSD is a recoverable file. + + + | **required**: False + | **type**: str + | **choices**: NONE, ALL, BACKOUTONLY + + + + csdrls + The CSDRLS system initialization parameter specifies whether CICS is to access the CSD in RLS mode. + + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + csdstrno + The CSDSTRNO system initialization parameter specifies the number of concurrent requests that can be processed against the CSD. + + + | **required**: False + | **type**: int + + + + cwakey + The CWAKEY system initialization parameter specifies the storage key for the common work area (CWA) if you are operating CICS with storage protection (STGPROT=YES). + + + | **required**: False + | **type**: str + | **choices**: USER, CICS + + + + dae + The DAE system initialization parameter specifies the default DAE action when new system dump table entries are created. + + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + datform + The DATFORM system initialization parameter specifies the external date display standard that you want to use for CICS date displays. 
+ + + | **required**: False + | **type**: str + | **choices**: MMDDYY, DDMMYY, YYMMDD + + + + db2conn + The DB2CONN system initialization parameter specifies whether you want CICS to start the connection automatically during initialization. + + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + dbctlcon + The DBCTLCON system initialization parameter specifies whether you want CICS to start the DBCTL connection automatically during initialization. + + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + debugtool + The DEBUGTOOL system initialization parameter specifies whether you want to use debugging profiles to select the programs that will run under the control of a debugging tool. + + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + dfltuser + The DFLTUSER system initialization parameter specifies the RACF userid of the default user; that is, the user whose security attributes are used to protect CICS resources in the absence of other, more specific, user identification. + + + | **required**: False + | **type**: str + + + + dip + The DIP system initialization parameter specifies whether the batch data interchange program, DFHDIP, is to be included. + + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + dismacp + The DISMACP system initialization parameter specifies whether CICS is to disable any transaction that terminates abnormally with an ASRD or ASRE abend. + + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + doccodepage + The DOCCODEPAGE system initialization parameter specifies the default host code page to be used by the document domain. + + + | **required**: False + | **type**: str + + + + dsalim + The DSALIM system initialization parameter specifies the upper limit of the total amount of storage within which CICS® can allocate the individual dynamic storage areas (DSAs) that reside in 24-bit storage. 
+ + + | **required**: False + | **type**: str + + + + dshipidl + The DSHIPIDL system initialization parameter specifies the minimum time, in hours, minutes, and seconds, that an inactive shipped terminal definition must remain installed in this region. + + + | **required**: False + | **type**: int + + + + dshipint + The DSHIPINT system initialization parameter specifies the interval between invocations of the timeout delete mechanism. + + + | **required**: False + | **type**: int + + + + dsrtpgm + The DSRTPGM system initialization parameter specifies the name of a distributed routing program. The distributed routing program must be specified in the DSRTPGM parameter for all routing and potential target regions. + + + | **required**: False + | **type**: str + + + + dtrpgm + The DTRPGM system initialization parameter specifies the name of a dynamic routing program. + + + | **required**: False + | **type**: str + + + + dtrtran + The DTRTRAN system initialization parameter specifies the name of the transaction definition that you want CICS to use for dynamic transaction routing. + + + | **required**: False + | **type**: str + + + + dump + The DUMP system initialization parameter specifies whether the CICS dump domain is to take SDUMPs. + + + | **required**: False + | **type**: str + | **choices**: YES, NO, TABLEONLY + + + + dumpds + The DUMPDS system initialization parameter specifies the transaction dump data set that is to be opened during CICS initialization. + + + | **required**: False + | **type**: str + | **choices**: AUTO, A, B + + + + dumpsw + The DUMPSW system initialization parameter specifies whether you want CICS to switch automatically to the next dump data set when the first is full. + + + | **required**: False + | **type**: str + | **choices**: NO, NEXT, ALL + + + + duretry + The DURETRY system initialization parameter specifies, in seconds, the total time that CICS is to continue trying to obtain a system dump using the SDUMP macro. 
+ + + | **required**: False + | **type**: int + + + + ecdsasze + The ECDSASZE system initialization parameter specifies the size of the ECDSA. + + + | **required**: False + | **type**: str + + + + edsalim + The EDSALIM system initialization parameter specifies the upper limit of the total amount of storage within which CICS® can allocate the individual extended dynamic storage areas (ExxDSAs) that reside in 31-bit (above-the-line) storage; that is, above 16 MB but below 2 GB. + + + | **required**: False + | **type**: str + + + + eodi + The EODI system initialization parameter specifies the end-of-data indicator for input from sequential devices. + + + | **required**: False + | **type**: str + + + + epcdsasze + The EPCDSASZE parameter specifies the size of the EPCDSA dynamic storage area. Message DFHSM0136I at initialization shows the value that is set. + + + | **required**: False + | **type**: str + + + + epudsasze + The EPUDSASZE parameter specifies the size of the EPUDSA dynamic storage area. Message DFHSM0136I at initialization shows the value that is set. + + + | **required**: False + | **type**: str + + + + erdsasze + The ERDSASZE system initialization parameter specifies the size of the ERDSA. + + + | **required**: False + | **type**: str + + + + esdsasze + The ESDSASZE system initialization parameter specifies the size of the ESDSA. + + + | **required**: False + | **type**: str + + + + esmexits + The ESMEXITS system initialization parameter specifies whether installation data is to be passed through the RACROUTE interface to the external security manager (ESM) for use in exits written for the ESM. + + + | **required**: False + | **type**: str + | **choices**: NOINSTLN, INSTLN + + + + eudsasze + The EUDSASZE system initialization parameter specifies the size of the EUDSA. 
+ + + | **required**: False + | **type**: str + + + + fcqronly + The FCQRONLY system initialization parameter specifies whether you want CICS to force all file control requests to run under the CICS QR TCB. This parameter applies to file control requests that access VSAM RLS files and local VSAM LSR files. + + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + fct + The FCT system initialization parameter specifies the suffix of the file control table to be used. + + + | **required**: False + | **type**: str + + + + fepi + The FEPI system initialization parameter specifies whether or not you want to use the Front End Programming Interface feature (FEPI). + + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + fldsep + The FLDSEP system initialization parameter specifies 'ON'e through four field-separator characters, each of which indicates end of field in the terminal input data. + + + | **required**: False + | **type**: str + + + + fldstrt + The FLDSTRT system initialization parameter specifies a single character to be the field-name-start character for free-form input for built-in functions. + + + | **required**: False + | **type**: str + + + + forceqr + The FORCEQR system initialization parameter specifies whether you want CICS to force all CICS API user application programs that are specified as threadsafe to run under the CICS QR TCB, as if they were specified as quasi-reentrant programs. + + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + fsstaff + The FSSTAFF system initialization parameter prevents transactions initiated by function-shipped EXEC CICS START requests being started against incorrect terminals. + + + | **required**: False + | **type**: str + | **choices**: YES, NO + + + + ftimeout + The FTIMEOUT system initialization parameter specifies a timeout interval for requests made on files that are opened in RLS mode. 
+ + + | **required**: False + | **type**: int + + + + gmtext + The GMTEXT system initialization parameter specifies whether the default logon message text (WELCOME TO CICS) or your own message text is to be displayed on the screen. + + + | **required**: False + | **type**: str + + + + gmtran + The GMTRAN system initialization parameter specifies the ID of a transaction. + + + | **required**: False + | **type**: str + + + + gntran + The GNTRAN system initialization parameter specifies the transaction that you want CICS to invoke when a user's terminal-timeout period expires, and instructs CICS whether to keep a pseudo-conversation in use at a terminal that is the subject of a timeout sign-off. + + + | **required**: False + | **type**: str + + + + grname + The GRNAME system initialization parameter specifies the z/OS Communications Server generic resource name, as 1 through 8 characters, under which a group of CICS terminal-owning regions in a CICSplex register to z/OS Communications Server. + + + | **required**: False + | **type**: str + + + + grplist + The GRPLIST system initialization parameter specifies the names of up to four lists of resource definition groups on the CICS system definition file (CSD). The resource definitions in all the groups in the specified lists are loaded during initialization when CICS performs a cold start. If a warm or emergency start is performed, the resource definitions are derived from the global catalog, and the GRPLIST parameter is ignored. + + + | **required**: False + | **type**: str + + + + gtftr + The GTFTR system initialization parameter specifies whether CICS can use the MVS generalized trace facility (GTF) as a destination for trace data. + + + | **required**: False + | **type**: str + | **choices**: OFF, ON + + + + hpo + The HPO system initialization parameter specifies whether you want to use the z/OS Communications Server authorized path feature of the high performance option (HPO). 
+ + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + httpserverhdr + The HTTPSERVERHDR system initialization parameter specifies the value (up to 64 characters) that CICS sets in the server header of HTTP responses. + + + | **required**: False + | **type**: str + + + + httpusragenthdr + The HTTPUSRAGENTHDR system initialization parameter specifies the value (up to 64 characters) that CICS sets in the user-agent header of HTTP requests. + + + | **required**: False + | **type**: str + + + + icp + The ICP system initialization parameter specifies that you want to perform a cold start for interval control program. + + + | **required**: False + | **type**: str + | **choices**: COLD + + + + icv + The ICV system initialization parameter specifies the region exit time interval in milliseconds. + + + | **required**: False + | **type**: int + + + + icvr + The ICVR system initialization parameter specifies the default runaway task time interval in milliseconds as a decimal number. + + + | **required**: False + | **type**: int + + + + icvtsd + The ICVTSD system initialization parameter specifies the terminal scan delay value. + + + | **required**: False + | **type**: int + + + + infocenter + The INFOCENTER system initialization parameter specifies the location of the online . If you add this parameter to the Web User Interface (WUI) CICS startup JCL, a link labeled Information Center is displayed on WUI views and menus. If you do not code this parameter, CICS does not construct links to IBM Documentation. + + + | **required**: False + | **type**: str + + + + initparm + The INITPARM system initialization parameter specifies parameters that are to be passed to application programs that use the ASSIGN INITPARM command. 
+ + + | **required**: False + | **type**: str + + + + intrdrjobuser + The INTRDRJOBUSER system initialization parameter instructs whether to use the task user ID or the CICS® region user ID as the job user ID for a JOB card that is submitted, without a USER parameter, by using SPOOLOPEN with USERID("INTRDR") and SPOOLWRITE. The default is the task user ID unless set otherwise by INTRDRJOBUSER. + + + | **required**: False + | **type**: str + | **choices**: TASK, REGION + + + + inttr + The INTTR system initialization parameter specifies whether the internal CICS trace destination is to be activated at system initialization. + + + | **required**: False + | **type**: str + | **choices**: ON, OFF + + + + ircstrt + The IRCSTRT system initialization parameter specifies whether IRC is to be started up at system initialization. + + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + isc + The ISC system initialization parameter specifies whether the CICS programs required for multiregion operation (MRO) and are to be included. + + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + jesdi + The JESDI system initialization parameter specifies, in a SIT for an alternate XRF system, the JES delay interval. + + + | **required**: False + | **type**: int + + + + jvmprofiledir + The JVMPROFILEDIR system initialization parameter specifies the name (up to 240 characters long) of a z/OS UNIX directory that contains the JVM profiles for CICS. CICS searches this directory for the profiles it needs to configure JVMs. + + + | **required**: False + | **type**: str + + + + kerberosuser + The KERBEROSUSER system initialization parameter specifies the user ID that is associated with the Kerberos service principal for the CICS region. 
+ + + | **required**: False + | **type**: str + + + + keyring + The KEYRING system initialization parameter specifies the fully qualified name of the key ring, within the RACF database, that contains the keys and X.509 certificates used by CICS support for the Secure Sockets Layer (SSL) and for web services security. The region user ID that will use the key ring must either own the key ring or have the authority to use the key ring if it is owned by a different region user ID. You can create an initial key ring with the DFH$RING exec in .CICS.SDFHSAMP. + + + | **required**: False + | **type**: str + + + + lgdfint + The LGDFINT system initialization parameter specifies the log defer interval to be used by CICS® log manager when determining how long to delay a forced journal write request before invoking the MVS™ system logger. + + + | **required**: False + | **type**: int + + + + lgnmsg + The LGNMSG system initialization parameter specifies whether z/OS Communications Server logon data is to be made available to an application program. + + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + llacopy + The LLACOPY system initialization parameter specifies the situations where CICS uses either the LLACOPY macro or the BLDL macro when locating modules in the DFHRPL or dynamic LIBRARY concatenation. + + + | **required**: False + | **type**: str + | **choices**: YES, NO, NEWCOPY + + + + localccsid + The LOCALCCSID system initialization parameter specifies the default CCSID for the local region. + + + | **required**: False + | **type**: int + + + + lpa + The LPA system initialization parameter specifies whether CICS and user modules can be used from the link pack areas. 
+ + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + maxopentcbs + The MAXOPENTCBS system initialization parameter specifies the maximum number, in the range 32 through 4032, of open task control blocks (open TCBs) CICS® can create in the pool of L8 and L9 mode TCBs. + + + | **required**: False + | **type**: int + + + + maxsockets + The MAXSOCKETS system initialization parameter specifies the maximum number of IP sockets that can be managed by the CICS sockets domain. + + + | **required**: False + | **type**: int + + + + maxssltcbs + The MAXSSLTCBS system initialization parameter specifies the maximum number of S8 TCBs that can run in the SSL pool. + + + | **required**: False + | **type**: int + + + + maxtlslevel + The MAXTLSLEVEL system initialization parameter specifies the maximum TLS protocol that CICS uses for secure TCP/IP connections. + + + | **required**: False + | **type**: str + | **choices**: TLS11, TLS12, TLS13 + + + + maxxptcbs + The MAXXPTCBS system initialization parameter specifies the maximum number, in the range 1 through 2000, of open X8 and X9 TCBs that can exist concurrently in the CICS region. + + + | **required**: False + | **type**: int + + + + mct + The MCT system initialization parameter specifies the monitoring control table suffix. + + + | **required**: False + | **type**: str + + + + mintlslevel + The MINTLSLEVEL system initialization parameter specifies the minimum TLS protocol that CICS uses for secure TCP/IP connections. + + + | **required**: False + | **type**: str + | **choices**: TLS11, TLS12, TLS13 + + + + mn + The MN system initialization parameter specifies whether monitoring is to be switched 'ON' or 'OFF' at initialization. + + + | **required**: False + | **type**: str + | **choices**: OFF, ON + + + + mnconv + The MNCONV system initialization parameter specifies whether conversational tasks have separate performance class records produced for each pair of terminal control I/O requests. 
+ + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + mnexc + The MNEXC system initialization parameter specifies whether the monitoring exception class is to be made active during initialization. + + + | **required**: False + | **type**: str + | **choices**: OFF, ON + + + + mnfreq + The MNFREQ system initialization parameter specifies the interval for which CICS automatically produces a transaction performance class record for any long-running transaction. + + + | **required**: False + | **type**: int + + + + mnidn + The MNIDN system initialization parameter specifies whether the monitoring identity class is to be made active during CICS initialization. + + + | **required**: False + | **type**: str + | **choices**: OFF, ON + + + + mnper + The MNPER system initialization parameter specifies whether the monitoring performance class is to be made active during CICS initialization. + + + | **required**: False + | **type**: str + | **choices**: OFF, ON + + + + mnres + The MNRES system initialization parameter specifies whether transaction resource monitoring is to be made active during CICS initialization. + + + | **required**: False + | **type**: str + | **choices**: OFF, ON + + + + mnsync + The MNSYNC system initialization parameter specifies whether you want CICS to produce a transaction performance class record when a transaction takes an implicit or explicit syncpoint (unit-of-work). + + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + mntime + The MNTIME system initialization parameter specifies whether you want the time stamp fields in the performance class monitoring data to be returned to an application using the EXEC CICS COLLECT STATISTICS MONITOR(taskno) command in either GMT or local time. 
+ + + | **required**: False + | **type**: str + | **choices**: GMT, LOCAL + + + + mqconn + The MQCONN system initialization parameter specifies whether you want CICS to start a connection to automatically during initialization. + + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + mrobtch + The MROBTCH system initialization parameter specifies the number of events that must occur before CICS is posted for dispatch because of the batching mechanism. + + + | **required**: False + | **type**: int + + + + mrofse + The MROFSE system initialization parameter specifies whether you want to extend the lifetime of the long-running mirror to keep it allocated until the end of the task rather than after a user syncpoint for function shipping applications. + + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + mrolrm + The MROLRM system initialization parameter specifies whether you want to establish an MRO long-running mirror task. + + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + msgcase + The MSGCASE system initialization parameter specifies how you want the message domains to display mixed case messages. + + + | **required**: False + | **type**: str + | **choices**: MIXED, UPPER + + + + msglvl + The MSGLVL system initialization parameter specifies the message level that controls the generation of messages to the console and JES message log. + + + | **required**: False + | **type**: int + | **choices**: 1, 0 + + + + mxt + The MXT system initialization parameter specifies the maximum number, in the range 10 through 2000, of user tasks that can exist in a CICS system at the same time. The MXT value does not include CICS system tasks. + + + | **required**: False + | **type**: int + + + + natlang + The NATLANG system initialization parameter specifies the single-character code for the language to be supported in this CICS run. 
+ + + | **required**: False + | **type**: str + | **choices**: E, C, K + + + + ncpldft + The NCPLDFT system initialization parameter specifies the name of the default named counter pool to be used by the CICS region 'ON' calls it makes to a named counter server. + + + | **required**: False + | **type**: str + + + + newsit + The NEWSIT system initialization parameter specifies whether CICS is to load the specified SIT, and enforce the use of all system initialization parameters, modified by any system initialization parameters provided by PARM, SYSIN, or the system console, even in a warm start. + + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + nistsp800131a + The NISTSP800131A system initialization parameter specifies whether the CICS region is to check for conformance to the NIST SP800-131A standard. + + + | **required**: False + | **type**: str + | **choices**: NOCHECK, CHECK + + + + nonrlsrecov + The NONRLSRECOV system initialization parameter specifies whether VSAM catalog recovery options should override those specified on the CICS FILE resource definition for all non-RLS files. Default behavior, with NONRLSRECOV=VSAMCAT, will take recovery attributes from the catalog if they are present, and from the file definition otherwise. RLS files must always specify recovery options on the catalog. + + + | **required**: False + | **type**: str + | **choices**: VSAMCAT, FILEDEF + + + + nqrnl + The NQRNL system initialization parameter controls resource name list (RNL) processing by z/OS global resource serialization, which can cause the scope value of a resource to change. CICS uses z/OS global resource serialization to provide sysplex-wide protection of application resources. + + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + offsite + The 'OFF'SITE system initialization parameter specifies whether CICS is to restart in 'OFF'-site recovery mode; that is, a restart is taking place at a remote site. 
+ + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + opertim + The OPERTIM system initialization parameter specifies the write-to-operator timeout value, in the range 0 through 86400 seconds (24 hours). + + + | **required**: False + | **type**: int + + + + opndlim + The OPNDLIM system initialization parameter specifies the destination and close destination request limit. + + + | **required**: False + | **type**: int + + + + parmerr + The PARMERR system initialization parameter specifies what action you want to follow if CICS detects incorrect system initialization parameter overrides during initialization. + + + | **required**: False + | **type**: str + | **choices**: INTERACT, IGNORE, ABEND + + + + pcdsasze + The PCDSASZE parameter specifies the size of the PCDSA dynamic storage area. Message DFHSM0136I at initialization shows the value that is set. + + + | **required**: False + | **type**: int + + + + pdi + The PDI system initialization parameter specifies the XRF primary delay interval, in seconds, in a SIT for an active CICS region. + + + | **required**: False + | **type**: int + + + + pdir + The PDIR system initialization parameter specifies a suffix for the PDIR list. + + + | **required**: False + | **type**: str + + + + pgaictlg + The PGAICTLG system initialization parameter specifies whether autoinstalled program definitions should be cataloged. + + + | **required**: False + | **type**: str + | **choices**: MODIFY, NONE, ALL + + + + pgaiexit + The PGAIEXIT system initialization parameter specifies the name of the program autoinstall exit program. + + + | **required**: False + | **type**: str + + + + pgaipgm + The PGAIPGM system initialization parameter specifies the state of the program autoinstall function at initialization. 
+ + + | **required**: False + | **type**: str + | **choices**: INACTIVE, ACTIVE + + + + pgchain + The PGCHAIN system initialization parameter specifies the character string that is identified by terminal control as a BMS terminal page-chaining command. + + + | **required**: False + | **type**: str + + + + pgcopy + The PGCOPY system initialization parameter specifies the character string that is identified by terminal control as a BMS command to copy output from one terminal to another. + + + | **required**: False + | **type**: str + + + + pgpurge + The PGPURGE system initialization parameter specifies the character string that is identified by terminal control as a BMS terminal page-purge command. + + + | **required**: False + | **type**: str + + + + pgret + The PGRET system initialization parameter specifies the character string that is recognized by terminal control as a BMS terminal page-retrieval command. + + + | **required**: False + | **type**: str + + + + pltpi + The PLTPI system initialization parameter specifies the suffix for, or the full name of, a program list table that contains a list of programs to be run in the final stages of system initialization. + + + | **required**: False + | **type**: str + + + + pltpisec + The PLTPISEC system initialization parameter specifies whether you want CICS to perform command security or resource security checking for PLT programs during CICS initialization. + + + | **required**: False + | **type**: str + | **choices**: NONE, CMDSEC, RESSEC, ALL + + + + pltpiusr + The PLTPIUSR system initialization parameter specifies the user ID that CICS uses for security checking for PLT programs that run during CICS initialization. + + + | **required**: False + | **type**: str + + + + pltsd + The PLTSD system initialization parameter specifies the suffix for, or full name of, a program list table that contains a list of programs to be run during system termination. 
+ + + | **required**: False + | **type**: str + + + + prgdlay + The PRGDLAY system initialization parameter specifies the BMS purge delay time interval that is added to the specified delivery time to determine when a message is to be considered undeliverable and therefore purged. + + + | **required**: False + | **type**: int + + + + print + The PRINT system initialization parameter specifies the method of requesting printout of the contents of a 3270 screen. + + + | **required**: False + | **type**: str + | **choices**: NO, YES, PA1, PA2, PA3 + + + + prtyage + The PRTYAGE system initialization parameter specifies the number of milliseconds to be used in the priority aging algorithm that is used to increment the priority of a task. + + + | **required**: False + | **type**: int + + + + prvmod + The PRVMOD system initialization parameter specifies the names of those modules that are not to be used from the LPA. + + + | **required**: False + | **type**: str + + + + psbchk + The PSBCHK system initialization parameter specifies whether CICS is to perform PSB authorization checks for remote terminal users who use transaction routing to initiate a transaction in this CICS region to access an attached IMS system. + + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + psdint + The PSDINT system initialization parameter specifies the persistent session delay interval, which states if, and for how long, z/OS Communications Server holds sessions in a recovery-pending state. + + + | **required**: False + | **type**: int + + + + pstype + The PSTYPE system initialization parameter specifies whether CICS uses z/OS Communications Server single-node persistent sessions (SNPS), multinode persistent sessions (MNPS), or does not use z/OS Communications Server persistent sessions support (NOPS). 
+ + + | **required**: False + | **type**: str + | **choices**: SNPS, MNPS, NOPS + + + + pudsasze + The PUDSASZE parameter specifies the size of the PUDSA dynamic storage area. Message DFHSM0136I at initialization shows the value that is set. + + + | **required**: False + | **type**: str + + + + pvdelay + The PVDELAY system initialization parameter specifies the persistent verification delay as a value in the range 0 through 10080 minutes (up to 7 days). + + + | **required**: False + | **type**: int + + + + quiestim + The QUIESTIM system initialization parameter specifies a timeout value for data set quiesce requests. + + + | **required**: False + | **type**: int + + + + racfsync + The RACFSYNC system initialization parameter specifies whether CICS listens for type 71 ENF events and refreshes user security. + + + | **required**: False + | **type**: str + | **choices**: YES, NO, CPSM + + + + ramax + The RAMAX system initialization parameter specifies the size in bytes of the I/O area allocated for each RECEIVE ANY issued by CICS, in the range 0 through 32767 bytes. + + + | **required**: False + | **type**: int + + + + rapool + The RAPOOL system initialization parameter specifies the number of concurrent receive-any requests that CICS is to process from the z/OS Communications Server for SNA. + + + | **required**: False + | **type**: str + + + + rdsasze + The RDSASZE system initialization parameter specifies the size of the RDSA. + + + | **required**: False + | **type**: str + + + + rentpgm + The RENTPGM system initialization parameter specifies whether you want CICS to allocate the read-only DSAs from read-only key-0 protected storage. + + + | **required**: False + | **type**: str + | **choices**: PROTECT, NOPROTECT + + + + resoverrides + The RESOVERRIDES system initialization parameter specifies the 1-64 character name of the resource overrides file. For more information, see . 
+ + + | **required**: False + | **type**: str + + + + resp + The RESP system initialization parameter specifies the type of request that CICS terminal control receives from logical units. + + + | **required**: False + | **type**: str + | **choices**: FME, RRN + + + + ressec + The RESSEC system initialization parameter specifies whether you want CICS to honor the RESSEC option specified on a transaction's resource definition. + + + | **required**: False + | **type**: str + | **choices**: ASIS, ALWAYS + + + + rls + The RLS system initialization parameter specifies whether CICS is to support VSAM record-level sharing (RLS). + + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + rlstolsr + The RLSTOLSR system initialization parameter specifies whether CICS is to include files that are to be opened in RLS mode when calculating the number of buffers, strings, and other resources for an LSR pool. + + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + rmtran + The RMTRAN system initialization parameter specifies the name of the transaction that you want an alternate CICS to initiate when logged-on class 1 terminals, which are defined with the attribute RECOVNOTIFY(TRANSACTION) specified, are switched following a takeover. + + + | **required**: False + | **type**: str + + + + rrms + The RRMS system initialization parameter specifies whether CICS is to register as a resource manager with recoverable resource management services (RRMS). + + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + rst + The RST system initialization parameter specifies a recoverable service table suffix. + + + | **required**: False + | **type**: str + + + + rstsignoff + The RSTSIGNOFF system initialization parameter specifies whether all users signed-on to the active CICS region are to remain signed-on following a persistent sessions restart or an XRF takeover. 
+ + + | **required**: False + | **type**: str + | **choices**: NOFORCE, FORCE + + + + rstsigntime + The RSTSIGNTIME parameter specifies the timeout delay interval for signon retention during a persistent sessions restart or an XRF takeover. + + + | **required**: False + | **type**: int + + + + ruwapool + The RUWAPOOL parameter specifies the option for allocating a storage pool the first time a program invoked by Language Environment runs in a task. + + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + sdsasze + The SDSASZE system initialization parameter specifies the size of the SDSA. + + + | **required**: False + | **type**: str + + + + sdtmemlimit + The SDTMEMLIMIT system initialization parameter specifies a limit to the amount of storage above the bar that is available for shared data tables to use for control information (entry descriptors, backout elements, and index nodes). The default is 4 GB. When you set this parameter, check your current setting for the z/OS MEMLIMIT parameter. + + + | **required**: False + | **type**: str + + + + sdtran + The SDTRAN system initialization parameter specifies the name of the shutdown transaction to be started at the beginning of normal and immediate shutdown. + + + | **required**: False + | **type**: str + + + + sec + The SEC system initialization parameter specifies what level of external security you want CICS to use. + + + | **required**: False + | **type**: str + | **choices**: YES, NO + + + + secprfx + The SECPRFX system initialization parameter specifies whether CICS prefixes the resource names in any authorization requests to RACF. + + + | **required**: False + | **type**: str + + + + sit + The SIT system initialization parameter specifies the suffix, if any, of the system initialization table that you want CICS to load at the start of initialization. 
+ + + | **required**: False + | **type**: str + + + + skrxxxx + The SKRxxxx system initialization parameter specifies that a single-keystroke-retrieval operation is required. + + Provide a dictionary with the key specifying a key on the 3270 keyboard and the value identifying a page retrieval command that the 3270 key represents. For example, PF20: PGPURGE + + The valid keys you can specify are PA1 through PA3, and PF1 through PF24. + + + | **required**: False + | **type**: dict + + + + snpreset + The SNPRESET system initialization parameter specifies whether preset userid terminals share a single access control environment element (ACEE) that is associated with the userid, or a unique ACEE for every terminal. + + + | **required**: False + | **type**: str + | **choices**: UNIQUE, SHARED + + + + snscope + The SNSCOPE system initialization parameter specifies whether a userid can be signed on to CICS more than once, within the scope of a single CICS region, a single MVS image, and a sysplex. + + + | **required**: False + | **type**: str + | **choices**: NONE, CICS, MVSIMAGE, SYSPLEX + + + + sotuning + The SOTUNING system initialization parameter specifies whether performance tuning for HTTP connections will occur to protect CICS from unconstrained resource demand. + + + | **required**: False + | **type**: str + | **choices**: YES, 520 + + + + spctr + The SPCTR system initialization parameter specifies the level of special tracing required for CICS as a whole. + + + | **required**: False + | **type**: str + + + + spctrxx + The SPCTRxx system initialization parameter specifies the level of special tracing activated for a particular CICS component. When you enable special tracing for a transaction, a terminal, or both, the trace points of this component at the specified trace level are eligible to make trace calls at any given point in the process of a special tracing task. 
+ + Provide a dictionary with the key specifying a two-letter code that represents a component and the value specifying the trace level. For example: AP=1-2 + + You can provide several dictionaries to specify the level of special tracing for several components. Each component is defined by one dictionary. + + For information about CICS components and their respective two-letter code, see \ `Component names and abbreviations `__\ . + + + | **required**: False + | **type**: dict + + + + spool + The SPOOL system initialization parameter specifies whether the system spooling interface is required. + + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + srbsvc + The SRBSVC system initialization parameter specifies the number that you have assigned to the CICS type 6 SVC. + + + | **required**: False + | **type**: int + + + + srt + The SRT system initialization parameter specifies the system recovery table suffix. + + + | **required**: False + | **type**: str + + + + srvercp + The SRVERCP system initialization parameter specifies the default server code page to be used by the DFHCNV data conversion table but only if the SRVERCP parameter in the DFHCNV macro is set to SYSDEF. + + + | **required**: False + | **type**: str + + + + sslcache + The SSLCACHE system initialization parameter specifies whether session IDs for SSL sessions are to be cached locally or at sysplex level for reuse by the CICS® region. The SSL cache allows CICS to perform abbreviated handshakes with clients that it has previously authenticated. + + + | **required**: False + | **type**: str + | **choices**: CICS, SYSPLEX + + + + ssldelay + The SSLDELAY system initialization parameter specifies the length of time in seconds for which CICS retains session ids for secure socket connections. + + + | **required**: False + | **type**: int + + + + start + The START system initialization parameter specifies the type of start for the system initialization program. 
+ + + | **required**: False + | **type**: str + | **choices**: AUTO, INITIAL, COLD, STANDBY, (INITIAL, ALL), (AUTO, ALL), (COLD, ALL), (STANDBY, ALL) + + + + starter + The STARTER system initialization parameter specifies whether the generation of starter system modules (with $ and # suffixes) is permitted. + + + | **required**: False + | **type**: str + | **choices**: YES, NO + + + + stateod + The STATEOD system initialization parameter specifies the end-of-day time in the format hhmmss. + + + | **required**: False + | **type**: int + + + + statint + The STATINT system initialization parameter specifies the recording interval for system statistics in the format hhmmss. + + + | **required**: False + | **type**: int + + + + statrcd + The STATRCD system initialization parameter specifies the interval statistics recording status at CICS initialization. + + + | **required**: False + | **type**: str + | **choices**: OFF, ON + + + + stgprot + The STGPROT system initialization parameter specifies whether you want storage protection to operate in the CICS region. + + + | **required**: False + | **type**: str + | **choices**: YES, NO + + + + stgrcvy + The STGRCVY system initialization parameter specifies whether CICS should try to recover from a storage violation. + + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + stntr + The STNTR system initialization parameter specifies the level of standard tracing required for CICS as a whole. + + + | **required**: False + | **type**: str + + + + stntrxx + The STNTRxx system initialization parameter specifies the level of standard tracing for a particular CICS component. + + Provide a dictionary with the key specifying a two-letter code that represents a component and the value specifying the trace level. For example: AP=1-2 + + You can provide several dictionaries to specify the level of standard tracing for several components. Each component is defined by one dictionary. 
For components that are not defined here, their standard tracing levels are determined by STNTR. + + For information about CICS components and their respective two-letter code, see \ `Component names and abbreviations `__\ . + + + | **required**: False + | **type**: dict + + + + subtsks + The SUBTSKS system initialization parameter specifies the number of task control blocks (TCBs) you want CICS to use for running tasks in concurrent mode. + + + | **required**: False + | **type**: int + | **choices**: 0, 1 + + + + suffix + The SUFFIX system initialization parameter specifies the last two characters of the name of this system initialization table. + + + | **required**: False + | **type**: str + + + + sydumax + The SYDUMAX system initialization parameter specifies the limit on the number of system dumps that can be taken per dump table entry. + + + | **required**: False + | **type**: int + + + + sysidnt + The SYSIDNT system initialization parameter specifies a 1- to 4-character name that is known only to your CICS region. + + + | **required**: False + | **type**: str + + + + systr + The SYSTR system initialization parameter specifies the setting of the main system trace flag. + + + | **required**: False + | **type**: str + | **choices**: ON, OFF + + + + takeovr + The TAKEOVR system initialization parameter specifies the action to be taken by the alternate CICS region, following the apparent loss of the surveillance signal in the active CICS region. + + + | **required**: False + | **type**: str + | **choices**: MANUAL, AUTO, COMMAND + + + + tbexits + The TBEXITS system initialization parameter specifies the names of your backout exit programs for use during emergency restart backout processing. + + + | **required**: False + | **type**: str + + + + tcp + The TCP system initialization parameter specifies whether the pregenerated non-z/OS Communications Server terminal control program, DFHTCP, is to be included. 
+ + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + tcpip + The TCPIP system initialization parameter specifies whether CICS TCP/IP services are to be activated at CICS startup. + + + | **required**: False + | **type**: str + | **choices**: YES, NO + + + + tcsactn + The TCSACTN system initialization parameter specifies the required action that CICS terminal control should take if the terminal control shutdown wait threshold expires. + + + | **required**: False + | **type**: str + | **choices**: NONE, UNBIND, FORCE + + + + tcswait + The TCSWAIT system initialization parameter specifies the required CICS terminal control shutdown wait threshold. + + + | **required**: False + | **type**: str + + + + tct + The TCT system initialization parameter specifies which terminal control table, if any, is to be loaded. + + + | **required**: False + | **type**: str + + + + tctuakey + The TCTUAKEY system initialization parameter specifies the storage key for the terminal control table user areas (TCTUAs) if you are operating CICS with storage protection (STGPROT=YES). + + + | **required**: False + | **type**: str + | **choices**: USER, CICS + + + + tctualoc + The TCTUALOC system initialization parameter specifies where terminal user areas (TCTUAs) are to be stored. + + + | **required**: False + | **type**: str + | **choices**: BELOW, ANY + + + + td + The TD system initialization parameter specifies the number of VSAM buffers and strings to be used for intrapartition transient data (TD). + + + | **required**: False + | **type**: str + + + + tdintra + The TDINTRA system initialization parameter specifies whether CICS is to initialize with empty intrapartition TD queues. + + + | **required**: False + | **type**: str + | **choices**: NOEMPTY, EMPTY + + + + traniso + The TRANISO system initialization parameter specifies, together with the STGPROT system initialization parameter, whether you want transaction isolation in the CICS region. 
+ + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + trap + The TRAP system initialization parameter specifies whether the FE global trap exit is to be activated at system initialization. + + + | **required**: False + | **type**: str + | **choices**: OFF, ON + + + + trdumax + The TRDUMAX system initialization parameter specifies the limit on the number of transaction dumps that may be taken per Dump Table entry. + + + | **required**: False + | **type**: int + + + + trtabsz + The TRTABSZ system initialization parameter specifies the size, in kilobytes, of the internal trace table. + + + | **required**: False + | **type**: int + + + + trtransz + The TRTRANSZ system initialization parameter specifies the size, in kilobytes, of the transaction dump trace table. + + + | **required**: False + | **type**: int + + + + trtranty + The TRTRANTY system initialization parameter specifies which trace entries should be copied from the internal trace table to the transaction dump trace table. + + + | **required**: False + | **type**: str + | **choices**: TRAN, ALL + + + + ts + The TS system initialization parameter specifies whether you want to perform a cold start for temporary storage, as well as the number of VSAM buffers and strings to be used for auxiliary temporary storage. + + + | **required**: False + | **type**: str + + + + tsmainlimit + The TSMAINLIMIT system initialization parameter specifies a limit for the storage that is available for main temporary storage queues to use. You can specify an amount of storage in the range 1 - 32768 MB (32 GB), but this amount must not be greater than 25% of the value of the z/OS parameter MEMLIMIT. The default is 64 MB. + + + | **required**: False + | **type**: str + + + + tst + The TST system initialization parameter specifies the temporary storage table suffix. + + + | **required**: False + | **type**: str + + + + udsasze + The UDSASZE system initialization parameter specifies the size of the UDSA. 
+ + + | **required**: False + | **type**: str + + + + uownetql + The UOWNETQL system initialization parameter specifies a qualifier for the NETUOWID for units of work initiated on the local CICS region. + + + | **required**: False + | **type**: str + + + + usertr + The USERTR system initialization parameter specifies whether the main user trace flag is to be set on or off. + + + | **required**: False + | **type**: str + | **choices**: ON, OFF + + + + usrdelay + The USRDELAY system initialization parameter specifies the maximum time, in the range 0 - 10080 minutes (up to seven days), that an eligible user ID and its associated attributes are cached in the CICS region after use. A user ID that is retained in the user table can be reused. + + + | **required**: False + | **type**: int + + + + ussconfig + The USSCONFIG system initialization parameter specifies the name and path of the root directory for configuration files on z/OS UNIX. + + + | **required**: False + | **type**: str + + + + usshome + The USSHOME system initialization parameter specifies the name and path of the root directory for files on z/OS UNIX. + + + | **required**: False + | **type**: str + + + + vtam + The VTAM system initialization parameter specifies whether the z/OS Communications Server access method is to be used. + + + | **required**: False + | **type**: str + | **choices**: YES, NO + + + + vtprefix + The VTPREFIX system initialization parameter specifies the first character to be used for the terminal identifiers (termids) of autoinstalled virtual terminals. + + + | **required**: False + | **type**: str + + + + webdelay + The WEBDELAY system initialization parameter specifies two Web delay periods. 
+ + + | **required**: False + | **type**: str + + + + wlmhealth + The WLMHEALTH system initialization parameter specifies the time interval and the health adjustment value to be used by CICS® on z/OS® Workload Manager Health API (IWM4HLTH) calls, which CICS makes to inform z/OS WLM about the health state of a CICS region. + + + | **required**: False + | **type**: str + + + + wrkarea + The WRKAREA system initialization parameter specifies the number of bytes to be allocated to the common work area (CWA). + + + | **required**: False + | **type**: int + + + + xappc + The XAPPC system initialization parameter specifies whether RACF session security can be used when establishing APPC sessions. + + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + xcfgroup + The XCFGROUP system initialization parameter specifies the name of the cross-system coupling facility (XCF) group to be joined by this region. + + + | **required**: False + | **type**: str + + + + xcmd + The XCMD system initialization parameter specifies whether you want CICS to perform command security checking, and optionally the RACF resource class name in which you have defined the command security profiles. + + + | **required**: False + | **type**: str + + + + xdb2 + The XDB2 system initialization parameter specifies whether you want CICS to perform DB2ENTRY security checking. + + + | **required**: False + | **type**: str + + + + xdct + The XDCT system initialization parameter specifies whether you want CICS to perform resource security checking for transient data queues. + + + | **required**: False + | **type**: str + + + + xfct + The XFCT system initialization parameter specifies whether you want CICS to perform file resource security checking, and optionally specifies the RACF resource class name in which you have defined the file resource security profiles. 
+ + + | **required**: False + | **type**: str + + + + xhfs + The XHFS system initialization parameter specifies whether CICS is to check the transaction user's ability to access files in the z/OS UNIX System Services file system. + + + | **required**: False + | **type**: str + | **choices**: YES, NO + + + + xjct + The XJCT system initialization parameter specifies whether you want CICS to perform journal resource security checking. + + + | **required**: False + | **type**: str + + + + xlt + The XLT system initialization parameter specifies a suffix for the transaction list table. + + + | **required**: False + | **type**: str + + + + xpct + The XPCT system initialization parameter specifies whether you want CICS to perform started transaction resource security checking, and optionally specifies the name of the RACF resource class name in which you have defined the started task security profiles. + + + | **required**: False + | **type**: str + + + + xppt + The XPPT system initialization parameter specifies that CICS is to perform application program resource security checks and optionally specifies the RACF resource class name in which you have defined the program resource security profiles. + + + | **required**: False + | **type**: str + + + + xpsb + The XPSB system initialization parameter specifies whether you want CICS to perform program specification block (PSB) security checking and optionally specifies the RACF resource class name in which you have defined the PSB security profiles. + + + | **required**: False + | **type**: str + + + + xptkt + The XPTKT system initialization parameter specifies whether CICS checks if a user can generate a PassTicket for the user's userid using the EXEC CICS REQUEST PASSTICKET command, the EXEC CICS REQUEST ENCRYPTPTKT command, or the EXEC FEPI REQUEST PASSTICKET command. 
+ + + | **required**: False + | **type**: str + | **choices**: YES, NO + + + + xres + The XRES system initialization parameter specifies whether you want CICS to perform resource security checking for particular CICS resources and optionally specifies the general resource class name in which you have defined the resource security profiles. + + + | **required**: False + | **type**: str + + + + xrf + The XRF system initialization parameter specifies whether XRF support is to be included in the CICS region. + + + | **required**: False + | **type**: str + | **choices**: NO, YES + + + + xtran + The XTRAN system initialization parameter specifies whether you want CICS to perform transaction security checking and optionally specifies the RACF resource class name in which you have defined the transaction security profiles. + + + | **required**: False + | **type**: str + + + + xtst + The XTST system initialization parameter specifies whether you want CICS to perform security checking for temporary storage queues and optionally specifies the RACF resource class name in which you have defined the temporary storage security profiles. + + + | **required**: False + | **type**: str + + + + xuser + The XUSER system initialization parameter specifies whether CICS is to perform surrogate user checks. + + + | **required**: False + | **type**: str + | **choices**: YES, NO + + + + zosmoninterval + The ZOSMONINTERVAL system initialization parameter specifies the sampling interval, in seconds, for the CICS® z/OS storage monitor task. + + + | **required**: False + | **type**: int + + + + zossos24unalloc + The ZOSSOS24UNALLOC system initialization parameter specifies short-on-storage (SOS) thresholds in KB for the total amount of unallocated z/OS® user region storage and for the largest contiguous storage area available in it. 
+ + + | **required**: False + | **type**: str + + + + zossos31unalloc + The ZOSSOS31UNALLOC system initialization parameter specifies short-on-storage (SOS) thresholds in KB for the total amount of unallocated z/OS® extended user region storage and for the largest contiguous storage area available in it. + + + | **required**: False + | **type**: str + + + + zossos64unalloc + The ZOSSOS64UNALLOC system initialization parameter specifies a short-on-storage (SOS) threshold in MB for the amount of unallocated z/OS® MEMLIMIT storage in the 64-bit addressing range. + + + | **required**: False + | **type**: int + + + + zossosnewtcb + The ZOSSOSNEWTCB system initialization parameter specifies the action that CICS® takes in response to a new open TCB that is being attached directly by CICS when the z/OS® user region storage or extended user region storage is short on storage (SOS). These open TCBs are L8, L9, X8 and X9 TCBs. + + + | **required**: False + | **type**: str + | **choices**: DELAY, NODELAY + + + + +space_primary + The size of the primary space allocated to the CICS startup JCL data set. Note that this is just the value; the unit is specified with \ :literal:`space\_type`\ . + + This option takes effect only when the CICS startup JCL data set is being created. If the CICS startup JCL data set already exists, the option has no effect. + + If this option is not set, the primary space is dynamically calculated based on the size of the generated CICS startup JCL. + + If the target data set is a member in a PDS or PDSE, then this value does not have any effect. + + + | **required**: False + | **type**: int + + + +space_secondary + The size of the secondary space allocated to the CICS startup JCL data set. Note that this is just the value; the unit is specified with \ :literal:`space\_type`\ . + + This option takes effect only when the CICS startup JCL data set is being created. If the CICS startup JCL data set already exists, the option has no effect. 
+ + If this option is not set, the secondary space is dynamically calculated as 10% of the total size of the generated CICS startup JCL. + + If the target data set is a member in a PDS or PDSE, then this value does not have any effect. + + + | **required**: False + | **type**: int + + + +space_type + The unit portion of the CICS startup JCL data set size. Note that this is just the unit; the value for the primary space is specified with \ :literal:`space\_primary`\ and the value for the secondary space is specified with \ :literal:`space\_secondary`\ . + + This option takes effect only when the CICS startup JCL data set is being created. If the CICS startup JCL data set already exists, the option has no effect. + + The size can be specified in megabytes (\ :literal:`M`\ ), kilobytes (\ :literal:`K`\ ), cylinders (\ :literal:`CYL`\ ), or tracks (\ :literal:`TRK`\ ). + + If neither \ :literal:`space\_secondary`\ nor \ :literal:`space\_primary`\ is set, then this value does not have any effect. + + If the target data set is a member in a PDS or PDSE, then this value does not have any effect. + + + | **required**: False + | **type**: str + | **default**: M + | **choices**: M, K, CYL, TRK + + + +state + The intended state for the CICS startup JCL data set, which the module aims to achieve. + + Specify \ :literal:`absent`\ to remove the CICS startup JCL data set entirely, if it already exists. + + Specify \ :literal:`initial`\ to create the CICS startup JCL data set if it does not already exist. + + Specify \ :literal:`warm`\ to retain an existing CICS startup JCL data set in its current state. The module verifies whether the specified data set exists and whether it matches the generated startup JCL. If both conditions are met, the module leaves the data set as is. If the data set does not exist or does not match, the operation fails. 
+ + + | **required**: True + | **type**: str + | **choices**: initial, absent, warm + + + +steplib + Any locations of additional data sets other than \ :literal:`SDFHAUTH`\ , \ :literal:`SDFHLIC`\ , \ :literal:`SCEERUN`\ , or \ :literal:`SCEERUN2`\ , to be added to the STEPLIB concatenation. The STEPLIB concatenation is where you specify the libraries that contain the modules loaded by the z/OS operating system. You can either add data sets at the very top of the list or append them to the bottom of the list. There are other data sets in between, as determined by the defaults or other input parameters; for example, \ :literal:`SEYUAUTH`\ and \ :literal:`SEYULOAD`\ as specified with \ :literal:`cpsm\_data\_sets`\ , \ :literal:`SCEERUN`\ and \ :literal:`SCEERUN2`\ as specified with \ :literal:`le\_data\_sets`\ , \ :literal:`SDFHAUTH`\ and \ :literal:`SDFHLIC`\ as specified with \ :literal:`cics\_data\_sets`\ , and so on. + + + | **required**: False + | **type**: dict + + + + data_sets + The \ :literal:`STEPLIB`\ data sets to be added to the bottom of the list. + + + | **required**: False + | **type**: list + + + + top_data_sets + The \ :literal:`STEPLIB`\ data sets to be added to the very top of the list. + + + | **required**: False + | **type**: list + + + + +volumes + The volume(s) where the data set is created. Use a string to define a singular volume or a list of strings for multiple volumes. + + If the target data set is a member in a PDS or PDSE, then this value does not have any effect. + + + | **required**: False + | **type**: raw + + + + +Examples +-------- + +.. 
code-block:: yaml+jinja + + + - name: Create CICS startup JCL data set + ibm.ibm_zos_cics.region_jcl: + applid: ABC9ABC1 + cics_data_sets: + template: 'CICSTS61.CICS.<< lib_name >>' + le_data_sets: + template: 'LANG.ENVIORNMENT.<< lib_name >>' + region_data_sets: + template: 'REGIONS.ABC9ABC1.<< data_set_name >>' + sit_parameters: + start: COLD + sit: 6$ + aicons: AUTO + auxtr: 'ON' + auxtrsw: ALL + cicssvc: 217 + csdrecov: BACKOUTONLY + edsalim: 500M + grplist: (DFHLIST,DFHTERML) + gmtext: 'ABC9ABC1. CICS Region' + icvr: 20000 + isc: 'YES' + ircstrt: 'YES' + mxt: 500 + pgaipgm: ACTIVE + sec: 'YES' + spool: 'YES' + srbsvc: 218 + tcpip: 'NO' + usshome: /usshome/directory + wlmhealth: "OFF" + wrkarea: 2048 + sysidnt: ZPY1 + + - name: Create CICS startup JCL data set with more customization + ibm.ibm_zos_cics.region_jcl: + applid: ABC9ABC1 + job_parameters: + class: A + cics_data_sets: + template: 'CICSTS61.CICS.<< lib_name >>' + sdfhauth: 'CICSTS61.OVERRDE.TEMPLT.SDFHAUTH' + le_data_sets: + template: 'LANG.ENVIORNMENT.<< lib_name >>' + region_data_sets: + template: 'REGIONS.ABC9ABC1.<< data_set_name >>' + output_data_sets: + default_sysout_class: B + ceemsg: + sysout: A + sysprint: + omit: True + steplib: + top_data_sets: + - TOP.DATA_SET.ONE + - TOP.DATA_SET.TWO + data_sets: + - BOTTOM.DATA_SET.ONE + sit_parameters: + start: COLD + sit: 6$ + aicons: AUTO + auxtr: 'ON' + auxtrsw: ALL + cicssvc: 217 + csdrecov: BACKOUTONLY + edsalim: 500M + grplist: (DFHLIST,DFHTERML) + gmtext: 'ABC9ABC1. CICS Region' + icvr: 20000 + isc: 'YES' + ircstrt: 'YES' + mxt: 500 + pgaipgm: ACTIVE + stntrxx: + ab: ALL + skrxxxx: + PA21: 'COMMAND' + sec: 'YES' + spool: 'YES' + srbsvc: 218 + tcpip: 'NO' + usshome: /usshome/directory + wlmhealth: "OFF" + wrkarea: 2048 + sysidnt: ZPY1 + + + + + + +See Also +-------- + +.. seealso:: + + - :ref:`stop_cics_module` + + + +Return Values +------------- + + + + + changed + | True if the CICS startup JCL data set was created, otherwise False. 
+ + | **returned**: always + | **type**: bool + + + + failed + | True if the Ansible task failed, otherwise False. + + | **returned**: always + | **type**: bool + + + + start_state + | The state of the CICS startup JCL data set before the Ansible task runs. + + | **returned**: always + | **type**: dict + + + + data_set_organization + | The organization of the data set at the start of the Ansible task. + + | **returned**: always + | **type**: str + | **sample**: Sequential + + + + + + exists + | True if the CICS startup JCL data set exists. + + | **returned**: always + | **type**: bool + + + + + + end_state + | The state of the CICS startup JCL data set at the end of the Ansible task. + + | **returned**: always + | **type**: dict + + + + data_set_organization + | The organization of the data set at the end of the Ansible task. + + | **returned**: always + | **type**: str + | **sample**: Sequential + + + + + + exists + | True if the CICS startup JCL data set exists. + + | **returned**: always + | **type**: bool + + + + + + jcl + | The CICS startup JCL that is built during module execution. + + | **returned**: always + | **type**: list + + + + executions + | A list of program executions performed during the Ansible task. + + | **returned**: always + | **type**: list + + + + name + | A human-readable name for the program execution. + + | **returned**: always + | **type**: str + + + + rc + | The return code for the program execution. + + | **returned**: always + | **type**: int + + + + stdout + | The standard out stream returned by the program execution. + + | **returned**: always + | **type**: str + + + + stderr + | The standard error stream returned from the program execution. + + | **returned**: always + | **type**: str + + + + + + msg + | A string containing an error message if applicable. 
+ + | **returned**: always + | **type**: str + + diff --git a/docs/source/modules/stop_region.rst b/docs/source/modules/stop_region.rst new file mode 100644 index 00000000..065132c0 --- /dev/null +++ b/docs/source/modules/stop_region.rst @@ -0,0 +1,403 @@ +.. ............................................................................... +.. © Copyright IBM Corporation 2020,2023 . +.. Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) . +.. ............................................................................... + +:github_url: https://github.com/ansible-collections/ibm_zos_cics/blob/main/plugins/modules/stop_region.py + +.. _stop_region_module: + + +stop_region -- Stop a CICS region +================================= + + + +.. contents:: + :local: + :depth: 1 + + +Synopsis +-------- +- Stop a CICS region by issuing a CEMT PERFORM SHUTDOWN command, or by canceling the job through the \ :literal:`jobs.cancel`\ utility provided by Z Open Automation Utilities (ZOAU). You can choose the shutdown mode from NORMAL, IMMEDIATE, or CANCEL. +- The \ :literal:`job\_id`\ , \ :literal:`job\_name`\ , or both can be used to shut down a CICS region. If multiple jobs are running with the same name, the \ :literal:`job\_id`\ is required. +- During a NORMAL or IMMEDIATE shutdown, a shutdown assist transaction should run to enable CICS to shut down in a controlled manner. By default, the CICS-supplied shutdown assist transaction, CESD is used. You can specify a custom shutdown assist transaction in the SDTRAN system initialization parameter. The task runs until the region has successfully shut down, or until the shutdown fails. +- You must have a console installed in the CICS region so that the stop\_region module can communicate with CICS. To define a console, you must install a terminal with the CONSNAME attribute set to your TSO user ID. For detailed instructions, see \ `Defining TSO users as console devices `__\ . 
Add your console definition into one of the resource lists defined on the GRPLIST system initialization parameter so that it gets installed into the CICS region. Alternatively, you can use a DFHCSDUP script to update an existing CSD. This function is provided by the csd module. +- You can specify a timeout, in seconds, for CICS shutdown processing. After a request to stop CICS is issued, if CICS shutdown processing is not completed when this timeout is reached, the module completes in a failed state. By default, the stop\_region module does not use a timeout, that is, the \ :literal:`timeout`\ parameter assumes a value of -1. + + + + + +Parameters +---------- + + + +job_id + Identifies the job ID belonging to the running CICS region. + + The stop\_region module uses this job ID to identify the state of the CICS region and shut it down. + + + | **required**: False + | **type**: str + + + +job_name + Identifies the job name belonging to the running CICS region. + + The stop\_region module uses this job name to identify the state of the CICS region and shut it down. + + The \ :literal:`job\_name`\ must be unique; if multiple jobs with the same name are running, use \ :literal:`job\_id`\ . + + + | **required**: False + | **type**: str + + + +mode + Specify the type of shutdown to be executed on the CICS region. + + Specify \ :literal:`normal`\ to perform a normal shutdown. This instructs the stop\_region module to issue a CEMT PERFORM SHUTDOWN command. + + Specify \ :literal:`immediate`\ to perform an immediate shutdown. This instructs the stop\_region module to issue a CEMT PERFORM SHUTDOWN IMMEDIATE command. + + Specify \ :literal:`cancel`\ to cancel the CICS region. This instructs the stop\_region module to use ZOAU's \ :literal:`jobs.cancel`\ utility to process the request. + + + | **required**: False + | **type**: str + | **default**: normal + | **choices**: normal, immediate, cancel + + + +no_sdtran + No shutdown assist transaction is to be run at CICS shutdown. 
+ + + | **required**: False + | **type**: bool + + + +sdtran + The 4-character identifier of the shutdown assist transaction. + + The default shutdown transaction, if neither SDTRAN nor NOSDTRAN is specified, is CESD. + + + | **required**: False + | **type**: str + + + +timeout + The maximum time, in seconds, to wait for CICS shutdown processing to complete. + + Specify -1 to exclude a timeout. + + + | **required**: False + | **type**: int + | **default**: -1 + + + + +Examples +-------- + +.. code-block:: yaml+jinja + + + - name: "Stop CICS region using job ID" + ibm.ibm_zos_cics.stop_region: + job_id: JOB12345 + + - name: "Stop CICS region immediately using job ID" + ibm.ibm_zos_cics.stop_region: + job_id: JOB12354 + mode: immediate + + - name: "Stop CICS region using job name and job ID" + ibm.ibm_zos_cics.stop_region: + job_id: JOB12354 + job_name: MYREG01 + + - name: "Stop CICS region using job name" + ibm.ibm_zos_cics.stop_region: + job_name: ANS1234 + mode: normal + + - name: "Cancel CICS region using job name" + ibm.ibm_zos_cics.stop_region: + job_name: ANS1234 + mode: cancel + + + + + + + + + +Return Values +------------- + + + + + changed + | True if the PERFORM SHUTDOWN or CANCEL command was executed. + + | **returned**: always + | **type**: bool + + + + failed + | True if the Ansible task failed, otherwise False. + + | **returned**: always + | **type**: bool + + + + executions + | A list of program executions performed during the Ansible task. + + | **returned**: always + | **type**: list + + + + name + | A human-readable name for the program execution. + + | **returned**: always + | **type**: str + + + + rc + | The return code for the program execution. + + | **returned**: always + | **type**: int + + + + return + | The standard output returned by the program execution. + + | **returned**: always + | **type**: dict + + + + changed + | True if the state was changed, otherwise False. 
+ + | **returned**: always + | **type**: bool + + + + failed + | True if the module failed, otherwise False. + + | **returned**: always + | **type**: bool + + + + jobs + | The output information for a list of jobs matching the specified criteria. + + | **returned**: on zos_job_query module execution + | **type**: list + + + + job_id + | Unique job identifier assigned to the job by JES. + + | **type**: str + + + + job_name + | The name of the batch job. + + | **type**: str + + + + owner + | The owner who ran the job. + + | **type**: str + + + + ret_code + | Return code output collected from the job log. + + | **type**: dict + + + + msg + | Return code or abend resulting from the job submission. + + | **type**: str + + + + msg_code + | Return code extracted from the `msg` so that it can be evaluated. For example, ABEND(S0C4) yields "S0C4". + + | **type**: str + + + + msg_txt + | Returns additional information related to the job. + + | **type**: str + + + + code + | Return code converted to an integer value (when possible). + + | **type**: int + + + + steps + | Series of JCL steps that were executed and their return codes. + + | **type**: list + + + + step_name + | Name of the step shown as "was executed" in the DD section. + + | **type**: str + + + + step_cc + | The CC returned for this step in the DD section. + + | **type**: int + + + + + + + + + + message + | Message returned on failure. + + | **returned**: on zos_job_query module execution + | **type**: str + + + + content + | The resulting text from the command submitted. 
+ + | **returned**: on zos_operator module execution + | **type**: list + + + + cmd + | The operator command that has been executed + + | **returned**: on zos_operator module execution + | **type**: str + + + + rc + | The return code from the operator command + + | **returned**: on zos_operator module execution + | **type**: int + + + + max_rc + | The maximum return code from the TSO status command + + | **returned**: on zos_tso_command module execution + | **type**: int + + + + output + | The output from the TSO command. + + | **returned**: on zos_tso_command module execution + | **type**: list + + + + command + | The executed TSO command. + + | **returned**: always + | **type**: str + + + + rc + | The return code from the executed TSO command. + + | **returned**: always + | **type**: int + + + + content + | The response resulting from the execution of the TSO command. + + | **returned**: always + | **type**: list + + + + lines + | The line number of the content. + + | **returned**: always + | **type**: int + + + + + + + + + + msg + | A string containing an error message if applicable. + + | **returned**: always + | **type**: str + + diff --git a/docs/source/modules/td_intrapartition.rst b/docs/source/modules/td_intrapartition.rst new file mode 100644 index 00000000..bf22970c --- /dev/null +++ b/docs/source/modules/td_intrapartition.rst @@ -0,0 +1,330 @@ +.. ............................................................................... +.. © Copyright IBM Corporation 2020,2023 . +.. Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) . +.. ............................................................................... + +:github_url: https://github.com/ansible-collections/ibm_zos_cics/blob/main/plugins/modules/td_intrapartition.py + +.. 
_td_intrapartition_module: + + +td_intrapartition -- Create and remove the CICS transient data intrapartition data set +====================================================================================== + + + +.. contents:: + :local: + :depth: 1 + + +Synopsis +-------- +- Create and remove the \ `transient data intrapartition `__\ data set used by a CICS® region. This data set holds all the data for intrapartition queues. +- You can use this module when provisioning or de-provisioning a CICS region. +- Use the \ :literal:`state`\ option to specify the intended state for the transient data intrapartition data set. For example, use \ :literal:`state=initial`\ to create a transient data intrapartition data set if it doesn't exist. + + + + + +Parameters +---------- + + + +region_data_sets + The location of the region data sets to be created by using a template, for example, \ :literal:`REGIONS.ABCD0001.\<\< data\_set\_name \>\>`\ . + + If you want to use a data set that already exists, ensure that the data set is a transient data intrapartition data set. + + + | **required**: True + | **type**: dict + + + + dfhintra + Overrides the templated location for the transient data intrapartition data set. + + + | **required**: False + | **type**: dict + + + + dsn + The data set name of the transient data intrapartition to override the template. + + + | **required**: False + | **type**: str + + + + + template + The base location of the region data sets with a template. + + + | **required**: False + | **type**: str + + + + +space_primary + The size of the primary space allocated to the transient data intrapartition data set. Note that this is just the value; the unit is specified with \ :literal:`space\_type`\ . + + This option takes effect only when the transient data intrapartition data set is being created. If the data set already exists, the option has no effect. 
+ + + | **required**: False + | **type**: int + | **default**: 100 + + + +space_secondary + The size of the secondary space allocated to the transient data intrapartition data set. Note that this is just the value; the unit is specified with \ :literal:`space\_type`\ . + + This option takes effect only when the transient data intrapartition data set is being created. If the data set already exists, the option has no effect. + + + | **required**: False + | **type**: int + | **default**: 10 + + + +space_type + The unit portion of the transient data intrapartition data set size. Note that this is just the unit; the value for the primary space is specified with \ :literal:`space\_primary`\ and the value for the secondary space is specified with \ :literal:`space\_secondary`\ . + + This option takes effect only when the transient data intrapartition data set is being created. If the data set already exists, the option has no effect. + + The size can be specified in megabytes (\ :literal:`M`\ ), kilobytes (\ :literal:`K`\ ), records (\ :literal:`REC`\ ), cylinders (\ :literal:`CYL`\ ), or tracks (\ :literal:`TRK`\ ). + + + | **required**: False + | **type**: str + | **default**: REC + | **choices**: M, K, REC, CYL, TRK + + + +state + The intended state for the transient data intrapartition data set, which the module aims to achieve. + + Specify \ :literal:`absent`\ to remove the transient data intrapartition data set entirely, if it exists. + + Specify \ :literal:`initial`\ to create the transient data intrapartition data set if it does not exist. If the specified data set exists but is empty, the module leaves the data set as is. If the specified data set exists and has contents, the module deletes the data set and then creates a new, empty one. + + Specify \ :literal:`warm`\ to retain an existing transient data intrapartition data set in its current state. The module verifies whether the specified data set exists and whether it contains any records. 
If both conditions are met, the module leaves the data set as is. If the data set does not exist or if it is empty, the operation fails. + + + | **required**: True + | **type**: str + | **choices**: initial, absent, warm + + + +volumes + The volume(s) where the data set is created. Use a string to define a singular volume or a list of strings for multiple volumes. + + + | **required**: False + | **type**: raw + + + + +Examples +-------- + +.. code-block:: yaml+jinja + + + - name: Initialize a transient data intrapartition data set by using the templated location + ibm.ibm_zos_cics.td_intrapartition: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: "initial" + + - name: Initialize a user specified transient data intrapartition data set + ibm.ibm_zos_cics.td_intrapartition: + region_data_sets: + dfhintra: + dsn: "REGIONS.ABCD0001.DFHINTRA" + state: "initial" + + - name: Initialize a large transient data intrapartition data set by using the templated location + ibm.ibm_zos_cics.td_intrapartition: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + space_primary: 50 + space_type: "M" + state: "initial" + + - name: Retain the existing state of a transient data intrapartition data set defined by the template + ibm.ibm_zos_cics.td_intrapartition: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: "warm" + + - name: Retain the existing state of a user specified transient data intrapartition data set + ibm.ibm_zos_cics.td_intrapartition: + region_data_sets: + dfhintra: + dsn: "REGIONS.ABCD0001.DFHINTRA" + state: "warm" + + - name: Delete a transient data intrapartition data set defined by the template + ibm.ibm_zos_cics.td_intrapartition: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: "absent" + + - name: Delete a user specified transient data intrapartition data set + ibm.ibm_zos_cics.td_intrapartition: + region_data_sets: + dfhintra: + dsn: 
"REGIONS.ABCD0001.DFHINTRA" + state: "absent" + + + + + + + + + +Return Values +------------- + + + + + changed + | True if the state was changed, otherwise False. + + | **returned**: always + | **type**: bool + + + + failed + | True if the Ansible task failed, otherwise False. + + | **returned**: always + | **type**: bool + + + + start_state + | The state of the transient data intrapartition data set before the Ansible task runs. + + | **returned**: always + | **type**: dict + + + + data_set_organization + | The organization of the data set at the start of the Ansible task. + + | **returned**: always + | **type**: str + | **sample**: VSAM + + + + + + exists + | True if the specified transient data intrapartition data set exists. + + | **returned**: always + | **type**: bool + + + + + + end_state + | The state of the transient data intrapartition data set at the end of the Ansible task. + + | **returned**: always + | **type**: dict + + + + data_set_organization + | The organization of the data set at the end of the Ansible task. + + | **returned**: always + | **type**: str + | **sample**: VSAM + + + + + + exists + | True if the specified transient data intrapartition data set exists. + + | **returned**: always + | **type**: bool + + + + + + executions + | A list of program executions performed during the Ansible task. + + | **returned**: always + | **type**: list + + + + name + | A human-readable name for the program execution. + + | **returned**: always + | **type**: str + + + + rc + | The return code for the program execution. + + | **returned**: always + | **type**: int + + + + stdout + | The standard output stream returned from the program execution. + + | **returned**: always + | **type**: str + + + + stderr + | The standard error stream returned from the program execution. 
+ + | **returned**: always + | **type**: str + + + + + + msg + | A string containing an error message if applicable + + | **returned**: always + | **type**: str + + diff --git a/docs/source/modules/transaction_dump.rst b/docs/source/modules/transaction_dump.rst new file mode 100644 index 00000000..f8bf3d49 --- /dev/null +++ b/docs/source/modules/transaction_dump.rst @@ -0,0 +1,413 @@ +.. ............................................................................... +.. © Copyright IBM Corporation 2020,2023 . +.. Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) . +.. ............................................................................... + +:github_url: https://github.com/ansible-collections/ibm_zos_cics/blob/main/plugins/modules/transaction_dump.py + +.. _transaction_dump_module: + + +transaction_dump -- Allocate transaction dump data sets +======================================================= + + + +.. contents:: + :local: + :depth: 1 + + +Synopsis +-------- +- Allocates the two \ `transaction dump `__\ data sets used by a CICS® region. +- The two data sets are referred to as transaction dump data set A (DFHDMPA) and transaction dump data set B (DFHDMPB). + + + + + +Parameters +---------- + + + +destination + Identifies which one of the transaction dump data sets is the target of the operation. If the value is left blank, A is implied, but you can specify A or B. + + Specify \ :literal:`A`\ to create or delete the A data set. + + Specify \ :literal:`B`\ to create or delete the B data set. This MUST be set for the creation of the B data set. + + + | **required**: False + | **type**: str + | **default**: A + | **choices**: A, B + + + +region_data_sets + The location of the region data sets to be created by using a template, for example, \ :literal:`REGIONS.ABCD0001.\<\< data\_set\_name \>\>`\ . + + + | **required**: True + | **type**: dict + + + + dfhdmpa + Overrides the templated location for the DFHDMPA data set. 
+ + + | **required**: False + | **type**: dict + + + + dsn + The data set name of DFHDMPA to override the template. + + + | **required**: False + | **type**: str + + + + + dfhdmpb + Overrides the templated location for the DFHDMPB data set. + + + | **required**: False + | **type**: dict + + + + dsn + The data set name of DFHDMPB to override the template. + + + | **required**: False + | **type**: str + + + + + template + The base location of the region data sets with a template. + + + | **required**: False + | **type**: str + + + + +space_primary + The size of the primary space allocated to the transaction dump data set. Note that this is just the value; the unit is specified with \ :literal:`space\_type`\ . + + This option takes effect only when the transaction dump data set is being created. If the data set already exists, the option has no effect. + + + | **required**: False + | **type**: int + | **default**: 20 + + + +space_secondary + The size of the secondary space allocated to the transaction dump data set. Note that this is just the value; the unit is specified with \ :literal:`space\_type`\ . + + This option takes effect only when the transaction dump data set is being created. If the data set already exists, the option has no effect. + + + | **required**: False + | **type**: int + | **default**: 4 + + + +space_type + The unit portion of the transaction dump data set size. Note that this is just the unit; the value for the primary space is specified with \ :literal:`space\_primary`\ and the value for the secondary space is specified with \ :literal:`space\_secondary`\ . + + This option takes effect only when the transaction dump data set is being created. If the data set already exists, the option has no effect. + + The size can be specified in megabytes (\ :literal:`M`\ ), kilobytes (\ :literal:`K`\ ), cylinders (\ :literal:`CYL`\ ), or tracks (\ :literal:`TRK`\ ). 
+ + + | **required**: False + | **type**: str + | **default**: M + | **choices**: M, K, CYL, TRK + + + +state + The intended state for the transaction dump data set, which the module aims to achieve. + + Specify \ :literal:`absent`\ to remove the transaction dump data set entirely, if it exists. + + Specify \ :literal:`initial`\ to create the transaction dump data set if it does not exist. If the specified data set exists but is empty, the module leaves the data set as is. If the specified data set exists and has contents, the module deletes the data set and then creates a new, empty one. + + Specify \ :literal:`warm`\ to retain an existing transaction dump data set in its current state. The module verifies whether the specified data set exists and whether it contains any records. If both conditions are met, the module leaves the data set as is. If the data set does not exist or if it is empty, the operation fails. + + + | **required**: True + | **type**: str + | **choices**: initial, absent, warm + + + +volumes + The volume(s) where the data set is created. Use a string to define a singular volume or a list of strings for multiple volumes. + + + | **required**: False + | **type**: raw + + + + +Examples +-------- + +.. 
code-block:: yaml+jinja + + + - name: Allocate transaction dump data set A (implicit) by using the templated location + ibm.ibm_zos_cics.transaction_dump: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: initial + + - name: Allocate a user specified data set as transaction dump data set A (implicit) + ibm.ibm_zos_cics.transaction_dump: + region_data_sets: + dfhdmpa: + dsn: "REGIONS.ABCD0001.DFHDMPA" + state: initial + + - name: Allocate transaction dump data set A by using the templated location + ibm.ibm_zos_cics.transaction_dump: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: initial + destination: A + + - name: Allocate a user specified data set as transaction dump data set A + ibm.ibm_zos_cics.transaction_dump: + region_data_sets: + dfhdmpa: + dsn: "REGIONS.ABCD0001.DFHDMPA" + state: initial + destination: A + + - name: Allocate transaction dump data set B by using the templated location + ibm.ibm_zos_cics.transaction_dump: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: initial + destination: B + + - name: Allocate a user specified data set as transaction dump data set B + ibm.ibm_zos_cics.transaction_dump: + region_data_sets: + dfhdmpb: + dsn: "REGIONS.ABCD0001.DFHDMPB" + state: initial + destination: B + + - name: Retain the existing state of transaction dump data set A (implicit) defined by the template + ibm.ibm_zos_cics.transaction_dump: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: "warm" + + - name: Retain the existing state of a user specified transaction dump data set A (implicit) + ibm.ibm_zos_cics.transaction_dump: + region_data_sets: + dfhdmpa: + dsn: "REGIONS.ABCD0001.DFHDMPA" + state: "warm" + + - name: Retain the existing state of transaction dump data set B defined by the template + ibm.ibm_zos_cics.transaction_dump: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: "warm" + destination: 
B + + - name: Retain the existing state of a user specified transaction dump data set B + ibm.ibm_zos_cics.transaction_dump: + region_data_sets: + dfhdmpb: + dsn: "REGIONS.ABCD0001.DFHDMPB" + state: "warm" + destination: B + + - name: Delete transaction dump data set A (implicit) defined by the template + ibm.ibm_zos_cics.transaction_dump: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: absent + + - name: Delete a user specified transaction dump data set A (implicit) + ibm.ibm_zos_cics.transaction_dump: + region_data_sets: + dfhdmpa: + dsn: "REGIONS.ABCD0001.DFHDMPA" + state: absent + + - name: Delete transaction dump data set B defined by the template + ibm.ibm_zos_cics.transaction_dump: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: absent + destination: B + + - name: Delete a user specified transaction dump data set B + ibm.ibm_zos_cics.transaction_dump: + region_data_sets: + dfhdmpb: + dsn: "REGIONS.ABCD0001.DFHDMPB" + state: absent + destination: B + + + + + + + + + +Return Values +------------- + + + + + changed + | True if the state was changed, otherwise False. + + | **returned**: always + | **type**: bool + + + + failed + | True if the Ansible task failed, otherwise False. + + | **returned**: always + | **type**: bool + + + + start_state + | The state of the transaction dump data set before the Ansible task runs. + + | **returned**: always + | **type**: dict + + + + data_set_organization + | The organization of the data set at the start of the Ansible task. + + | **returned**: always + | **type**: str + | **sample**: Sequential + + + + + + exists + | True if the specified transaction dump data set exists. + + | **returned**: always + | **type**: bool + + + + + + end_state + | The state of the transaction dump data set at the end of the Ansible task. + + | **returned**: always + | **type**: dict + + + + data_set_organization + | The organization of the data set at the end of the Ansible task. 
+ + | **returned**: always + | **type**: str + | **sample**: Sequential + + + + + + exists + | True if the specified transaction dump data set exists. + + | **returned**: always + | **type**: bool + + + + + + executions + | A list of program executions performed during the Ansible task. + + | **returned**: always + | **type**: list + + + + name + | A human-readable name for the program execution. + + | **returned**: always + | **type**: str + + + + rc + | The return code for the program execution. + + | **returned**: always + | **type**: int + + + + stdout + | The standard output stream returned from the program execution. + + | **returned**: always + | **type**: str + + + + stderr + | The standard error stream returned from the program execution. + + | **returned**: always + | **type**: str + + + + + + msg + | A string containing an error message if applicable + + | **returned**: always + | **type**: str + + diff --git a/docs/source/playbooks.rst b/docs/source/playbooks.rst index f058e8a6..26808629 100644 --- a/docs/source/playbooks.rst +++ b/docs/source/playbooks.rst @@ -9,6 +9,11 @@ Playbooks There are sample playbooks that demonstrate the **IBM z/OS CICS collection** functionality in the `samples repository`_. +The sample playbooks fall into two categories: + +- Operations on CICS and CICSPlex SM resources and definitions. The sample playbooks use the CMCI modules to achieve various real-life use cases. +- CICS provisioning. The sample playbooks demonstrate how a set of modules for provisioning and managing CICS TS data sets and utilities can be used to provision and deprovision a CICS region and to perform CICS startup or shutdown operations. + .. _samples repository: https://github.com/IBM/z_ansible_collections_samples @@ -28,8 +33,8 @@ to reference your CICS artifacts and configuration. You can find the playbook content that is included with the collection in the same location where the collection is installed. 
For more information, refer to -the `installation documentation`_. In the following examples, this document will -refer to the installation path as ``~/.ansible/collections/ibm/ibm_zos_cics``. +the `installation documentation`_. In the following examples, this document +refers to the installation path as ``~/.ansible/collections/ibm/ibm_zos_cics``. .. _Ansible playbook: @@ -81,13 +86,13 @@ Inventory --------- Ansible works with multiple managed nodes (hosts) at the same time, using a -list or group of lists known as an `inventory`_. Once the inventory is defined, +list or group of lists known as an `inventory`_. After the inventory is defined, you can use `patterns`_ to select the hosts or groups that you want Ansible to run against. -Included in the CICS `deploy program sample`_ is an example `inventory file`_ -which shows how host information is supplied to Ansible. It looks like the -following: +Included in the CICS `deploy program sample`_ is an example `inventory file`_, +which shows how host information is supplied to Ansible. Code that defines a host +is shown below: .. code-block:: yaml @@ -98,24 +103,16 @@ following: ansible_user: zos_target_username ansible_python_interpreter: path_to_python_interpreter_binary_on_zos_target +A host is defined by the following properties: -The value for the property **ansible_host** is the hostname of the managed node; -for example, ``ansible_host: example.com`` - -The value for the property **zos_target_username** is the user name to use when -connecting to the host; for example, ``ansible_user: ibmuser``. +- **ansible_host**: The value of this property identifies the hostname of the managed node. For example: ``ansible_host: example.com`` +- **zos_target_username**: The value of this property identifies the user name to use when connecting to the host. For example: ``ansible_user: ibmuser`` +- **ansible_python_interpreter**: The value of this property specifies the Python path for the target host. 
For example: ``ansible_python_interpreter: /usr/lpp/rsusr/python39/bin/python`` + This is useful for systems with more than one Python installation, or when Python is not installed in the default location **/usr/bin/python**. -The value for the property **ansible_python_interpreter** is the target host -Python path. This is useful for systems with more than one Python installation, -or when Python is not installed in the default location **/usr/bin/python**; -for example, ``ansible_python_interpreter: /usr/lpp/rsusr/python39/bin/python`` +For more information about the Python configuration requirements on z/OS, see the Ansible `FAQ`_. -For more information on Python configuration requirements on z/OS, refer to -Ansible `FAQ`_. - -Behavioral inventory parameters such as ``ansible_port`` which allows you -to set the port for a host can be reviewed in the -`behavioral inventory parameters`_. +For behavioral inventory parameters such as ``ansible_port`` which allows you to set the port for a host, see `behavioral inventory parameters`_. .. _inventory: https://docs.ansible.com/ansible/latest/user_guide/intro_inventory.html @@ -139,27 +136,16 @@ Although you can store variables in the inventory file, storing separate host and group variables files may help you organize your variable values more easily. An example of one of these variable files is the `zos_host.yml`_ file included with the `deploy_program sample`_, which is used to provide the -required environment variables. - -The value for the property **BPXK_AUTOCVT** must be configured to ``ON``. - -The value for the property **ZOAU_HOME** is the ZOA Utilities install root path; -for example, ``/usr/lpp/IBM/zoautil``. +required environment variables. Another such example is the `variables.yml`_ file +included with the `CICS provisioning`_ playbooks. -The value for the property **PYTHONPATH** is the ZOA Utilities Python library -path; for example, ``/usr/lpp/IBM/zoautil/lib/``. 
+The properties that define the environment variables are as follows: -The value for the property **LIBPATH** is both the path to the Python libraries -on the target and the ZOA Utilities Python library path separated by -colons ``:``; for example, -``/usr/lpp/IBM/zoautil/lib/:/usr/lpp/rsusr/python39/lib:/lib:/usr/lib:.``. - -The value for the property **PATH** is the ZOA utilities BIN path and the Python -interpreter path; for example, -``/usr/lpp/IBM/zoautil/bin:/usr/lpp/rsusr/python39/bin/python:/bin``. - -The included sample variables file (zos_host.yml) contains variables specific to -the playbook as well as the following: +- **BPXK_AUTOCVT**: The value must be ``ON``. +- **ZOAU_HOME**: The value of this property identifies the ZOA Utilities install root path. For example: ``/usr/lpp/IBM/zoautil`` +- **PYTHONPATH**: The value of this property identifies the ZOA Utilities Python library path. For example: ``/usr/lpp/IBM/zoautil/lib/`` +- **LIBPATH**: The value of this property specifies both the path to the Python libraries on the target and the ZOA Utilities Python library path, separated by colons ``:``. For example: ``/usr/lpp/IBM/zoautil/lib/:/usr/lpp/rsusr/python39/lib:/lib:/usr/lib:.`` +- **PATH**: The value of this property identifies the ZOA utilities BIN path and the Python interpreter path, separated by colons ``:``. For example: ``/usr/lpp/IBM/zoautil/bin:/usr/lpp/rsusr/python39/bin/python:/bin`` .. code-block:: yaml @@ -172,23 +158,30 @@ the playbook as well as the following: .. note:: In ZOAU 1.0.2 and later, the property **ZOAU_ROOT** is no longer supported - and can be replaced with the property **ZOAU_HOME**. If you are using ZOAU - version 1.0.1 or lower, you must continue to use the property - **ZOAU_ROOT** which is the ZOA Utilities install root path required for + and can be replaced with the property **ZOAU_HOME**. 
+ + If you are using ZOAU version 1.0.1 or lower, you must continue to use the property + **ZOAU_ROOT**, which is the ZOA Utilities install root path required for ZOAU; for example, ``/usr/lpp/IBM/zoautil``. .. _zos_host.yml: https://github.com/IBM/z_ansible_collections_samples/blob/main/zos_subsystems/cics/cmci/deploy_program/host_vars/zos_host.yml .. _deploy_program sample: https://github.com/IBM/z_ansible_collections_samples/blob/main/zos_subsystems/cics/cmci/deploy_program +.. _variables.yml: + https://github.com/IBM/z_ansible_collections_samples/blob/main/zos_subsystems/cics/provisioning/host_vars/variables.yml +.. _CICS provisioning: + https://github.com/IBM/z_ansible_collections_samples/tree/main/zos_subsystems/cics/provisioning Module Defaults --------------- -Ansible has a module defaults feature to use the same values during every use of -a module, rather than repeating them everytime. Here we can set the host url and +Ansible has a module defaults feature, which allows you to use the same values during every use of +a module, rather than repeating them every time. + +For example, when using CMCI modules to manage CICS and CICSPlex SM resources and definitions, you can set the host URL and credentials of the **cmci_get** module to be the same throughout the playbook. .. code-block:: yaml @@ -200,20 +193,49 @@ credentials of the **cmci_get** module to be the same throughout the playbook. cmci_password: "{{ cmci_password }}" -If you wish to use the same values in **all** CMCI modules, you can assign them -to the group called **cmci_group**. +If you want to use the same values in **all** CMCI modules, you can assign them +to the group called **cmci**. ..
code-block:: yaml module_defaults: - group/ibm.ibm_zos_cics.cmci_group: + group/ibm.ibm_zos_cics.cmci: cmci_host: "my.system.host" cmci_port: "system.port.number" cmci_user: "my.username" cmci_password: "my.password" + +Likewise, you can easily apply a default set of CICS TS data sets and utilities for the provisioning or de-provisioning of CICS regions. +If you want to use the same values in **all** CICS TS data set provisioning modules, you can assign them to the group called **region**. +The following **module_defaults** example illustrates the use of a templated location for some data sets and a user-specified name for +some other data sets instead of the template. + +.. code-block:: yaml + + module_defaults: + group/ibm.ibm_zos_cics.region: + state: initial + cics_data_sets: + template: "CTS610.CICS740.<< data_set_name >>" + sdfhauth: "CICSTS61.OVERRDE.TEMPLT.SDFHAUTH" + region_data_sets: + template: "{{ansible_user}}.REGIONS.{{applid}}.<< data_set_name >>" + dfhgcd: "REGIONS.{{applid}}.GCD" + +The **cics_data_sets** parameter defines a defaults group through which you can specify the location of a CICS installation. It is used to define +the data set names of the SDFHAUTH, SDFHLOAD and SDFHLIC libraries. These libraries can be used by multiple CICS regions. In this example, the SDFHLOAD +and SDFHLIC libraries are created by default using the templated location of ``CTS610.CICS740.<< data_set_name >>``, so their data set names are +``CTS610.CICS740.SDFHLOAD`` and ``CTS610.CICS740.SDFHLIC`` respectively. However, the SDFHAUTH library is created with the data set name of +``CICSTS61.OVERRDE.TEMPLT.SDFHAUTH``, overriding the template. + +The **region_data_sets** parameter defines a defaults group through which you can specify a high level qualifier for the data sets that are used by a +single CICS region. 
In this example, all the region data sets except DFHGCD are created by default using the templated location of +``{{ansible_user}}.REGIONS.{{applid}}.<< data_set_name >>``, while DFHGCD is created with the data set name of ``REGIONS.{{applid}}.GCD``, overriding the template. + + .. note:: - Group module defaults are only available in ``ansible-core`` 2.12 or later. If + Group module defaults are available in ``ansible-core`` 2.12 or later. If this syntax is used with ``ansible-core`` 2.11 or earlier, the values are perceived as not present, and a 'missing required arguments' error is thrown. @@ -226,22 +248,30 @@ Access the `collection samples repository`_ and ensure you have navigated to the directory containing the playbook you want to run. For example: ``zos_subsystems/cics/cmci/deploy_program/``. -Use the Ansible command ``ansible-playbook`` to run the sample playbook. The -command syntax is ``ansible-playbook -i `` which, using -the example above of ``deploy_program``, is -``ansible-playbook -i inventory deploy_program.yaml``. +Use the Ansible command ``ansible-playbook`` to run the sample playbook. + +**Command Syntax** + +``ansible-playbook -i `` + +Assuming the example above of ``deploy_program``, the command to issue is as follows: + +``ansible-playbook -i inventory deploy_program.yaml`` This command assumes that the controller's public SSH key has been shared with the managed node. If you want to avoid entering a username and password each -time, copy the SSH public key to the managed node using the ``ssh-copy-id`` -command; for example, ``ssh-copy-id -i ~/.ssh/mykey.pub user@``. +time, copy the SSH public key to the managed node by using the ``ssh-copy-id`` +command, as shown in the following example: + +``ssh-copy-id -i ~/.ssh/mykey.pub user@`` + +Alternatively, you can use the ``--ask-pass`` option, as shown in the following example, so that +the user is prompted to enter a connection password each time a playbook is run. 
-Alternatively, you can use the ``--ask-pass`` option to be prompted for the -user's password each time a playbook is run; for example, -``ansible-playbook -i inventory deploy_program.yaml --ask-pass``. +``ansible-playbook -i inventory deploy_program.yaml --ask-pass`` .. note:: - * Using ``--ask-pass`` is not recommended because it will hinder performance. + * Using ``--ask-pass`` is not recommended because it hinders performance. * Using ``--ask-pass`` requires ``sshpass`` be installed on the controller. For further reference, see the `ask-pass documentation`_. @@ -254,15 +284,15 @@ ERROR, DEBUG. .. note:: It is a good practice to review the playbook samples before executing them. - It will help you understand what requirements in terms of space, location, - names, authority, and artifacts will be created and cleaned up. Although + This helps you understand what requirements are expected in terms of space, location, + names, authority, and artifacts that are created and cleaned up. Although samples are always written to operate without the need for the user's configuration, flexibility is written into the samples because it is not - easy to determine if a sample has access to the host's resources. - Review the playbook notes sections for additional details and + easy to determine whether a sample has access to the host's resources. + Review the notes sections in the playbooks for additional details and configuration. - Playbooks often submit JCL that is included in the samples repository + Playbooks often submit a JCL that is included in the samples repository under the `files directory`_. Review the sample JCL for necessary edits to allow for submission on the target system. The most common changes are to add a CLASS parameter and change the NOTIFY user parameter. For more details, @@ -273,4 +303,4 @@ ERROR, DEBUG. .. _collection samples repository: https://github.com/IBM/z_ansible_collections_samples .. 
_files directory: - https://github.com/IBM/z_ansible_collections_samples/tree/main/zos_basics/constructs/files + https://github.com/IBM/z_ansible_collections_samples/tree/main/zos_basics/constructs/files \ No newline at end of file diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index eda78f78..6c9c2095 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -7,20 +7,139 @@ Releases ======== -Version 2.0.0 -====== +Version 2.1.0 +============= +What's New +------------------- + +**New modules** + +**General Availability of CICS provisioning modules.** You can use these Ansible modules to create automation tasks that provision or deprovision, and start or stop +a CICS region. Sample playbooks show you how to do this with the latest version of the Ansible IBM z/OS CICS collection. All modules were initially released +with Version 1.1.0-beta as noted below. Subsequent Version 1.1.0-beta releases may include enhancements and bugfixes for these modules. Refer to the What's new +of Version 1.1.0-beta releases for details. + +You can use the following modules for provisioning and managing CICS TS data sets: + +* ``aux_temp_storage`` for the CICS auxiliary temporary storage data set. This module was initially + released as ``auxiliary_temp`` with Version 1.1.0-beta.4. The module is changed to ``aux_temp_storage`` in Version 2.1.0. +* ``aux_trace`` for the CICS auxiliary trace data sets. This module was initially released as ``trace`` with Version 1.1.0-beta.4. + The module is changed to ``aux_trace`` in Version 2.1.0. +* ``csd`` for the CICS system definition data set. This module was initially released with Version 1.1.0-beta.4. +* ``global_catalog`` for the CICS global catalog data set. This module was initially released with Version 1.1.0-beta.4. +* ``local_request_queue`` for the CICS local request queue data set. This module was initially released with Version 1.1.0-beta.3. 
+* ``td_intrapartition`` for the CICS transient data intrapartition data set. This module was initially released as ``intrapartition`` with + Version 1.1.0-beta.4. The module is changed to ``td_intrapartition`` in Version 2.1.0. +* ``transaction_dump`` for the CICS transaction dump data sets. This module was initially released with Version 1.1.0-beta.4. + +You can use the following modules for CICS startup and shutdown operations: + +* ``region_jcl`` - Create a CICS startup JCL data set. This module replaces ``start_cics``, which was released with Version 1.1.0-beta.5. + ``region_jcl`` is significantly different from ``start_cics`` in function. ``region_jcl`` creates a data set that contains the startup JCL, but + doesn't perform the actual startup processing. ``region_jcl`` also supports definition and allocation of user data sets with the ``user_data_sets`` parameter. +* ``stop_region`` - Stop a CICS region. This module was initially released as ``stop_cics`` with Version 1.1.0-beta.5. The module is changed to ``stop_region`` + in Version 2.1.0. In Version 2.1.0, ``stop_region`` supports a new input parameter, ``job_name`` so that you can use the job name, which is typically the CICS's + APPLID, to identify a running CICS region. + +The group name for the CICS provisioning modules is ``region``. However, in the Version 1.1.0-beta releases, the group name was ``region_group``. + +CICS provisioning modules provide support for all in-service CICS TS releases including the latest CICS TS 6.2. + +**Changed modules** + +The group name for the CMCI modules is changed to ``cmci`` instead of ``cmci_group``. ``cmci_group`` is deprecated. + +**New playbooks** -What's new ---------------- +Sample playbooks are available at the `samples repository`_. The CICS provisioning playbook samples demonstrate how to configure and allocate the required +data sets to provision and start a CICS region, with or without SMSS support. 
The deprovisioning sample shows how to stop a running region and delete all +the associated data sets. + +Version 2.0.0 +============= +What's New +------------------- * **Removed support for Python 2.7.** Python 2.7 is no longer supported as the managed node runtime. +Version 1.1.0-beta.5 +==================== +What's New +------------------- -Version 1.0.6 +**New modules** + +* ``start_cics`` - Start a CICS region. +* ``stop_cics`` - Stop a CICS region. + +**Changed modules** + +* ``csd`` - A new ``state`` option, ``script`` is introduced so that you can now supply a script that contains ``CSDUP`` commands to update an existing CSD. The script can be either a data set or a z/OS UNIX file. +* All modules for CICS region data sets - New option ``space_secondary`` is introduced so that you can specify the size of the secondary extent. +* All modules for CICS region data sets - Return values now use ``data_set_organization`` to indicate the organization of the data set. The ``vsam`` field has been removed from the return structure. + + +Version 1.1.0-beta.4 ============= What's New ------------------- + +**New modules** + +* ``auxiliary_temp`` - Create and remove the CICS auxiliary temporary storage data set. +* ``csd`` - Create, remove, and manage the CICS system definition data set. +* ``intrapartition`` - Create and remove the CICS transient data intrapartition data set. +* ``trace`` - Allocate the CICS auxiliary trace data sets. +* ``transaction_dump`` - Allocate the CICS transaction dump data sets. + +**Changed modules** + +* ``local_request_queue`` - New option ``warm`` added to the ``state`` input parameter. + +**Bugfixes** + +* ``local_catalog`` and ``local_request_queue`` - The behavior of these modules with ``state`` set to ``initial`` is updated to match documentation.
+ +Version 1.1.0-beta.3 +==================== +What's New +------------------- + +**New modules** + +* ``local_request_queue`` - Create and remove the CICS local request queue data set. + +**Changed modules** + +* ``global_catalog`` and ``local_catalog`` - Added support for the ``region_data_sets`` and ``cics_data_sets`` defaults groups. This enhancement changes the way you specify the data set location for these modules. + +Version 1.1.0-beta.2 +==================== +What's New +------------------- + +**New modules** + +* ``local_catalog`` - Create, initialize, and manage the CICS local catalog data set. + +**Changed modules** + +* ``global_catalog`` - Added return values ``start_state``, ``end_state``, and ``executions``. + +**Bugfixes** + +* ``global_catalog`` - Fixed an issue where the module failed when input parameters were lowercase. Now these input parameters are not case sensitive. +* ``global_catalog`` - Fixed an issue that was found in the ``changed`` flag. Now the ``changed`` flag corresponds with the actions taken during the ``global_catalog`` execution. + + +Version 1.1.0-beta.1 +==================== +What's New +------------------- + +**New modules** + +* ``global_catalog`` - Create, initialize, and manage the CICS global catalog data set. Version 1.0.5 diff --git a/docs/source/requirements.rst b/docs/source/requirements.rst index d135f666..133ffa5b 100644 --- a/docs/source/requirements.rst +++ b/docs/source/requirements.rst @@ -11,12 +11,15 @@ The nodes listed below require these specific versions of software: Control node ------------ -A control node is any machine with Ansible® installed. You can run commands and playbooks from a control noede, be it a laptop, desktop, or server. The following software must be installed on the control node. +A control node is any machine with Ansible® installed. You can run commands and playbooks from a control node, be it a laptop, desktop, or server. .. note:: The IBM® z/OS® CICS® collection cannot run on a Windows system.
-* `Ansible version`_: 2.14 or later -* `Python`_: 3.9 or later +The following software must be installed on the control node: + +* `Ansible version`_ 2.14 or later +* `Python`_ 3.9 or later +* z/OS core collection 1.5.0 or later, if you want to use the provisioning tasks provided by the **IBM® z/OS® CICS® collection** .. _Ansible version: diff --git a/docs/source/requirements_managed.rst b/docs/source/requirements_managed.rst index d87eeb9a..38b6312d 100644 --- a/docs/source/requirements_managed.rst +++ b/docs/source/requirements_managed.rst @@ -5,6 +5,13 @@ Requirements of managed nodes ============================= +The tasks in the **IBM® z/OS® CICS® collection** can be classified into two types, +**CMCI tasks** and **provisioning tasks**, that have different requirements of the managed +node. + +CMCI tasks +---------- + The CMCI tasks in the **IBM® z/OS® CICS® collection** interact with the managed node over an HTTP connection by leveraging the `CMCI REST API`_. Therefore, an SSH connection is not required. Instead, you can delegate @@ -15,7 +22,8 @@ For more ways of delegating tasks, see `Controlling where tasks run`_. The requirements of the managed node are as follows: -* IBM CICS V4.2 or later +* z/OS Version 2.3 or later +* All IBM CICS TS releases that are in service * A `CMCI connection`_ must be set up in either a CICSplex or a stand-alone CICS region * Python module dependencies: @@ -31,24 +39,19 @@ The requirements of the managed node are as follows: pip install requests xmltodict - You can also install them using the playbook. For example, this `CICS - sample playbook`_ shows how you can ensure the pre-requisites are installed before the module is executed. + sample playbook`_ shows how you can ensure that the prerequisites are installed before the module is executed. .. _requests: https://pypi.org/project/requests/ .. _xmltodict: https://pypi.org/project/xmltodict/ - .. 
_CICS sample playbook: https://github.com/IBM/z_ansible_collections_samples/tree/main/zos_subsystems/cics/cmci/reporting - -If you use the CICS collection in conjunction with other IBM z/OS collections, -your managed node must also follow the requirements of those collections, for example, `IBM z/OS core managed node requirements`_. - -If you use the CICS collection alone but don't delegate the CICS tasks to your localhost, your managed node must also follow the `IBM z/OS core managed node requirements`_ except that IBM Z Open Automation Utilities (ZOAU) is not required. +If you use the CMCI tasks in the CICS collection but don't delegate the CMCI tasks to your localhost, your +managed node must also follow the `IBM z/OS core managed node requirements`_ except that IBM Z Open Automation Utilities (ZOAU) is not required. .. _z/OS OpenSSH: https://www.ibm.com/docs/en/zos/latest?topic=descriptions-zos-openssh @@ -63,3 +66,25 @@ If you use the CICS collection alone but don't delegate the CICS tasks to your l https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/requirements_managed.html .. _Controlling where tasks run: https://docs.ansible.com/ansible/latest/user_guide/playbooks_delegation.html#delegating-tasks + + +Provisioning tasks +------------------ + +The provisioning tasks in the **IBM® z/OS® CICS® collection** interact with a +z/OS managed node over SSH, and therefore have different requirements to the +CMCI tasks. The provisioning modules follow the requirements of the other z/OS +collections as documented in `IBM z/OS core managed node requirements`_. 
These +requirements include installation of the following components: + +* z/OS Version 2.3 or later +* z/OS OpenSSH +* IBM Open Enterprise SDK for Python (previously IBM Open Enterprise Python for z/OS) +* IBM Z Open Automation Utilities (ZOAU) 1.2.x +* The z/OS shell + +For specific versions of these dependencies and additional information, review +the `IBM z/OS core managed node requirements`_ page. + +Note that you must have z/OS core collection 1.5.0 or later installed in the control node +if you want to run the provisioning tasks. diff --git a/galaxy.yml b/galaxy.yml index d3b80a7c..7efd517f 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -1,5 +1,5 @@ --- -# (c) Copyright IBM Corp. 2020,2021 +# (c) Copyright IBM Corp. 2020,2024 # IBM collection namespace namespace: ibm @@ -8,7 +8,7 @@ namespace: ibm name: ibm_zos_cics # The collection version -version: 2.0.0 +version: 2.1.0 # Collection README file readme: README.md @@ -19,6 +19,7 @@ authors: - Tom Latham - Sophie Green - Ya Qing Chen + - Andrew Twydell # Description description: The Red Hat Ansible Certified Content for IBM Z CICS collection includes connection plugins, action plugins, @@ -28,6 +29,7 @@ description: license: [Apache-2.0] # Tags tags: [ibm, z, zos, z_os, cics, cmci, infrastructure] + # Collections that this collection requires to be installed for it to be usable. 
# dependencies: {} # This collection depends on no other collections @@ -69,3 +71,6 @@ build_ignore: - .whitesource - ibm-ibm_zos_cics-*.tar.gz - changelogs + - .devcontainer + - .vscode + - .secrets.baseline diff --git a/meta/ibm_zos_cics_meta.yml b/meta/ibm_zos_cics_meta.yml index c1f9bc39..2a2ed500 100644 --- a/meta/ibm_zos_cics_meta.yml +++ b/meta/ibm_zos_cics_meta.yml @@ -1,6 +1,6 @@ --- name: ibm_zos_cics -version: 2.0.0 +version: 2.1.0 managed_requirements: - name: Python version: ">=3.9" diff --git a/meta/runtime.yml b/meta/runtime.yml index ebcf400d..eef4052a 100644 --- a/meta/runtime.yml +++ b/meta/runtime.yml @@ -1,5 +1,5 @@ --- -requires_ansible: ">=2.14.0" +requires_ansible: ">=2.15.0" action_groups: cmci_group: - cmci_action @@ -7,3 +7,19 @@ action_groups: - cmci_delete - cmci_get - cmci_update + cmci: + - cmci_action + - cmci_create + - cmci_delete + - cmci_get + - cmci_update + region: + - aux_temp_storage + - aux_trace + - csd + - region_jcl + - global_catalog + - local_catalog + - local_request_queue + - td_intrapartition + - transaction_dump diff --git a/plugins/action/aux_temp_storage.py b/plugins/action/aux_temp_storage.py new file mode 100644 index 00000000..77fe85a0 --- /dev/null +++ b/plugins/action/aux_temp_storage.py @@ -0,0 +1,19 @@ +# (c) Copyright IBM Corp. 
2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from ansible_collections.ibm.ibm_zos_cics.plugins.plugin_utils._module_action_plugin import _DataSetActionPlugin + + +class ActionModule(_DataSetActionPlugin): + def run(self, tmp=None, task_vars=None): + return super(ActionModule, self)._run( + ds_name="dfhtemp", + module_name="aux_temp_storage", + cics_data_sets_required=False, + tmp=tmp, + task_vars=task_vars, + ) diff --git a/plugins/action/aux_trace.py b/plugins/action/aux_trace.py new file mode 100644 index 00000000..fcf46b5b --- /dev/null +++ b/plugins/action/aux_trace.py @@ -0,0 +1,24 @@ +# (c) Copyright IBM Corp. 2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +from ansible_collections.ibm.ibm_zos_cics.plugins.plugin_utils._module_action_plugin import _DataSetActionPlugin + + +class ActionModule(_DataSetActionPlugin): + def run(self, tmp=None, task_vars=None): + module_args = self._task.args.copy() + + ds_name = "dfhauxt" + if module_args.get("destination", "").upper().strip() == "B": + ds_name = "dfhbuxt" + + return super(ActionModule, self)._run( + ds_name=ds_name, + module_name="aux_trace", + cics_data_sets_required=False, + tmp=tmp, + task_vars=task_vars, + ) diff --git a/plugins/action/csd.py b/plugins/action/csd.py new file mode 100644 index 00000000..c1d4ea77 --- /dev/null +++ b/plugins/action/csd.py @@ -0,0 +1,28 @@ +# (c) Copyright IBM Corp. 
2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from ansible_collections.ibm.ibm_zos_cics.plugins.plugin_utils._module_action_plugin import _DataSetActionPlugin +from ansible_collections.ibm.ibm_zos_cics.plugins.modules.csd import LOCAL, INPUT_CONTENT, INPUT_LOCATION, INPUT_SOURCE + + +class ActionModule(_DataSetActionPlugin): + def run(self, tmp=None, task_vars=None): + return super(ActionModule, self)._run( + ds_name="dfhcsd", + module_name="csd", + cics_data_sets_required=True, + tmp=tmp, + task_vars=task_vars, + ) + + def _process_module_args(self, module_args, _templar, ds_name, task_vars, cics_data_sets_required): + super(ActionModule, self)._process_module_args(module_args, _templar, ds_name, task_vars, cics_data_sets_required) + input_location = module_args.get(INPUT_LOCATION) + if input_location == LOCAL: + input_src = module_args.get(INPUT_SOURCE) + with open(input_src, 'r') as input_file: + module_args[INPUT_CONTENT] = input_file.read() diff --git a/plugins/action/global_catalog.py b/plugins/action/global_catalog.py new file mode 100644 index 00000000..41e1f707 --- /dev/null +++ b/plugins/action/global_catalog.py @@ -0,0 +1,19 @@ +# (c) Copyright IBM Corp. 
2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from ansible_collections.ibm.ibm_zos_cics.plugins.plugin_utils._module_action_plugin import _DataSetActionPlugin + + +class ActionModule(_DataSetActionPlugin): + def run(self, tmp=None, task_vars=None): + return super(ActionModule, self)._run( + ds_name="dfhgcd", + module_name="global_catalog", + cics_data_sets_required=True, + tmp=tmp, + task_vars=task_vars, + ) diff --git a/plugins/action/local_catalog.py b/plugins/action/local_catalog.py new file mode 100644 index 00000000..0218c691 --- /dev/null +++ b/plugins/action/local_catalog.py @@ -0,0 +1,19 @@ +# (c) Copyright IBM Corp. 2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from ansible_collections.ibm.ibm_zos_cics.plugins.plugin_utils._module_action_plugin import _DataSetActionPlugin + + +class ActionModule(_DataSetActionPlugin): + def run(self, tmp=None, task_vars=None): + return super(ActionModule, self)._run( + ds_name="dfhlcd", + module_name="local_catalog", + cics_data_sets_required=True, + tmp=tmp, + task_vars=task_vars, + ) diff --git a/plugins/action/local_request_queue.py b/plugins/action/local_request_queue.py new file mode 100644 index 00000000..35544e59 --- /dev/null +++ b/plugins/action/local_request_queue.py @@ -0,0 +1,19 @@ +# (c) Copyright IBM Corp. 
2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from ansible_collections.ibm.ibm_zos_cics.plugins.plugin_utils._module_action_plugin import _DataSetActionPlugin + + +class ActionModule(_DataSetActionPlugin): + def run(self, tmp=None, task_vars=None): + return super(ActionModule, self)._run( + ds_name="dfhlrq", + module_name="local_request_queue", + cics_data_sets_required=False, + tmp=tmp, + task_vars=task_vars, + ) diff --git a/plugins/action/region_jcl.py b/plugins/action/region_jcl.py new file mode 100644 index 00000000..7dd224e3 --- /dev/null +++ b/plugins/action/region_jcl.py @@ -0,0 +1,78 @@ +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +from ansible.plugins.action import ActionBase +from ansible_collections.ibm.ibm_zos_cics.plugins.plugin_utils._module_action_plugin import ( + CPSM_DS_KEYS, + LE_DS_KEYS, + REGION_DS_KEYS, + CICS_DS_KEYS, + LIBRARY_KEYS, + _process_libraries_args, + _process_region_data_set_args, + _set_top_libraries_key, + _validate_list_of_data_set_lengths +) + +MODULE_NAME = 'ibm.ibm_zos_cics.region_jcl' + + +class ActionModule(ActionBase): + def run(self, tmp=None, task_vars=None): + super(ActionModule, self).run(tmp, task_vars) + self.module_args = self._task.args.copy() + + return_structure = { + "failed": False, + "changed": False, + "msg": "", + "executions": [], + "jcl": "", + "start_state": {"data_set_organization": "NONE", "exists": False}, + "end_state": {"data_set_organization": "NONE", "exists": False}, + } + + try: + _process_module_args(self.module_args, self._templar, task_vars) + except (KeyError, ValueError) as e: + return_structure.update({ + "failed": True, + "msg": e.args[0], + }) + else: + return_structure.update( + 
self._execute_module( + module_name=MODULE_NAME, + module_args=self.module_args, + task_vars=task_vars, + tmp=tmp, + ) + ) + return return_structure + + +def _process_module_args(module_args, _templar, task_vars): + for library_key in LIBRARY_KEYS: + _set_top_libraries_key(module_args, library_key) + _validate_list_of_data_set_lengths(module_args[library_key]["top_data_sets"]) + _validate_list_of_data_set_lengths(module_args[library_key].get("data_sets", [])) + + for cics_lib in CICS_DS_KEYS: + _process_libraries_args(module_args, _templar, task_vars, "cics_data_sets", cics_lib) + + for region_ds in REGION_DS_KEYS: + _process_region_data_set_args(module_args, _templar, region_ds, task_vars) + # Template field in region_data_sets needs to be removed before module execution + if module_args["region_data_sets"].get("template"): + del module_args["region_data_sets"]["template"] + + for le_lib in LE_DS_KEYS: + _process_libraries_args(module_args, _templar, task_vars, "le_data_sets", le_lib) + + # Optional argument + if module_args.get("cpsm_data_sets"): + for cpsm_lib in CPSM_DS_KEYS: + _process_libraries_args(module_args, _templar, task_vars, "cpsm_data_sets", cpsm_lib) diff --git a/plugins/action/stop_region.py b/plugins/action/stop_region.py new file mode 100644 index 00000000..e4ce921f --- /dev/null +++ b/plugins/action/stop_region.py @@ -0,0 +1,391 @@ +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +import time +import re +import logging +from ansible.plugins.action import ActionBase +from ansible_collections.ibm.ibm_zos_cics.plugins.modules.stop_region import ( + JOB_ID, + MODE, + IMMEDIATE, + CANCEL, + SDTRAN, + NO_SDTRAN, + JOB_NAME, + TIMEOUT, + TIMEOUT_DEFAULT, +) +from ansible.errors import AnsibleActionFail +from datetime import datetime, timedelta + +logging.basicConfig(level=logging.DEBUG) + +ACTIVE_AND_WAITING = "CICS is still active... waiting for successful shutdown." 
+CANCEL_REGION = "CANCEL {0}" +CHECK_CICS_STATUS = "Checking status of job {0}" +EXECUTIONS = "executions" +DEFAULT_SHUTDOWN = "MODIFY {0},CEMT PERFORM SHUTDOWN" +FAILED = "failed" +CHANGED = "changed" +MSG = "msg" +EXECUTING = "EXECUTING" +STATUS = "status" +IMMEDIATE_SHUTDOWN = "MODIFY {0},CEMT PERFORM SHUTDOWN IMMEDIATE" +STOP_MODULE_NAME = "ibm.ibm_zos_cics.stop_region" +NAME = "name" +RC = "rc" +RETURN = "return" +RUNNING_ATTEMPTING_TO_STOP = "CICS is running, attempting to stop CICS." +SHUTDOWN_SUCCESS = "CICS has been shutdown." +SDTRAN_COMMAND = "{0} SDTRAN({1})" +NO_SDTRAN_COMMAND = "{0} NOSDTRAN" +TSO_STATUS_COMMAND = "STATUS {0}" +TSO_STATUS_ID_COMMAND = "STATUS {0}({1})" + + +class ActionModule(ActionBase): + def run(self, tmp=None, task_vars=None): + self._setup(tmp, task_vars) + self._execute_stop_module() + + if self.failed: + return self.get_result() + + self._parse_module_params() + + try: + self._get_job_data() + except AnsibleActionFail as e: + self.failed = True + return self.get_result(e.args[0]) + + if not self.job_id or not self.job_name or self.job_status != EXECUTING: + return self.get_result() + + self.logger.debug(RUNNING_ATTEMPTING_TO_STOP) + try: + if self.stop_mode == CANCEL: + self._cancel_region() + else: + self._perform_shutdown() + except AnsibleActionFail as e: + self.failed = True + return self.get_result(e.args[0]) + + try: + self.wait_for_shutdown() + self.changed = True + except TimeoutError as e: + self.failed = True + self.msg = e.args[0] + + return self.get_result() + + def _cancel_region(self): + run_command_result = self.execute_cancel_shell_cmd( + self.job_name, self.job_id) + if not run_command_result.get(CHANGED) or run_command_result.get(RC) != 0: + raise AnsibleActionFail("Error running job cancel command") + + def _perform_shutdown(self): + shutdown_command = format_shutdown_command( + self.job_name, self.stop_mode, self.sdtran, self.no_sdtran + ) + shutdown_output = self.execute_zos_operator_cmd(shutdown_command) + 
get_console_errors(shutdown_output) + + def _setup(self, tmp, task_vars): + super(ActionModule, self).run(tmp, task_vars) + self.task_vars = task_vars + self.tmp = tmp + self.module_args = self._task.args.copy() + self.logger = logging.getLogger(__name__) + + self.failed = False + self.changed = False + self.msg = "" + self.executions = [] + + def get_result(self, msg=None): + return { + FAILED: self.failed, + CHANGED: self.changed, + MSG: msg if msg else self.msg, + EXECUTIONS: self.executions, + } + + def _execute_stop_module(self): + stop_module_output = self._execute_module( + module_name=STOP_MODULE_NAME, + module_args=self.module_args, + task_vars=self.task_vars, + tmp=self.tmp, + ) + self.failed = stop_module_output.get(FAILED, self.failed) + self.msg = stop_module_output.get(MSG, self.msg) + + def _parse_module_params(self): + self.job_name = self.module_args.get(JOB_NAME) + self.job_id = self.module_args.get(JOB_ID) + self.stop_mode = self.module_args.get(MODE) + self.sdtran = self.module_args.get(SDTRAN) + self.no_sdtran = self.module_args.get(NO_SDTRAN) + self.timeout = self.module_args.get(TIMEOUT, TIMEOUT_DEFAULT) + self.job_status = EXECUTING + + def _get_job_data(self): + if self.job_id and self.job_name: + self.job_status = self._get_job_status() + elif self.job_name: + running_jobs = self._get_running_jobs() + + if len(running_jobs) == 0: + self.job_status = "MISSING" + return + if len(running_jobs) > 1: + self.job_status = "MULTIPLE" + raise AnsibleActionFail( + "Cannot ambiguate between multiple running jobs with the same name ({0}). 
Use `job_id` as a parameter to specify the correct job.".format( + self.job_name)) + + self.job_id = running_jobs[0][JOB_ID] + self.job_status = running_jobs[0][STATUS] + + elif self.job_id: + self.job_name = self._get_job_name_from_id() + self.job_status = self._get_job_status() + + def _get_job_name_from_id(self): + job_query_response = self.execute_zos_job_query(self.job_id) + return _get_job_name_from_query(job_query_response, self.job_id) + + def _get_job_status(self): + tso_status_response = self.execute_zos_tso_cmd( + TSO_STATUS_ID_COMMAND.format(self.job_name, self.job_id) + ) + self._add_status_execution("{0}({1})".format( + self.job_name, self.job_id), tso_status_response) + job_status = _get_job_status_name_id( + tso_status_response, self.job_name, self.job_id + ) + + if job_status == "COMBINATION INVALID": + raise AnsibleActionFail( + "No jobs found with name {0} and ID {1}".format(self.job_name, self.job_id)) + return job_status + + def _get_running_jobs(self): + tso_query_response = self.execute_zos_tso_cmd( + TSO_STATUS_COMMAND.format(self.job_name) + ) + self._add_status_execution(self.job_name, tso_query_response) + jobs = _get_job_info_from_status(tso_query_response, self.job_name) + if len(jobs) == 0: + raise AnsibleActionFail( + "Job with name {0} not found".format(self.job_name)) + + running = [] + for job in jobs: + if job[STATUS] == EXECUTING: + running.append(job) + return running + + def _add_status_execution(self, job, result): + self.executions.append({ + NAME: CHECK_CICS_STATUS.format(job), + RC: result.get("max_rc"), + RETURN: result, + }) + + def wait_for_shutdown(self): + end_time = calculate_end_time( + self.timeout) if self.timeout > 0 else None + + self.executions.append({}) + status = EXECUTING + while status == EXECUTING and ( + get_datetime_now() < end_time if end_time else True + ): + self.logger.debug(ACTIVE_AND_WAITING) + time.sleep(15) + + tso_cmd_response = self.execute_zos_tso_cmd( + 
TSO_STATUS_ID_COMMAND.format(self.job_name, self.job_id) + ) + + self.executions.pop() + self._add_status_execution(self.job_id, tso_cmd_response) + + status = _get_job_status_name_id( + tso_cmd_response, self.job_name, self.job_id + ) + + if status == EXECUTING: + raise TimeoutError( + "Timeout reached before region successfully stopped") + self.logger.debug(SHUTDOWN_SUCCESS) + + def execute_zos_tso_cmd(self, command): + return self._execute_module( + module_name="ibm.ibm_zos_core.zos_tso_command", + module_args={"commands": command}, + task_vars=self.task_vars, + ) + + def execute_zos_job_query(self, job_id): + query_response = self._execute_module( + module_name="ibm.ibm_zos_core.zos_job_query", + module_args={JOB_ID: job_id}, + task_vars=self.task_vars, + ) + self.executions.append({ + NAME: "ZOS Job Query - {0}".format(job_id), + RC: 0 if query_response.get("message", "-") == "" and isinstance(query_response.get("jobs", {}), list) else 1, + RETURN: query_response + }) + return query_response + + def execute_zos_operator_cmd(self, command): + operator_response = self._execute_module( + module_name="ibm.ibm_zos_core.zos_operator", + module_args={"cmd": command}, + task_vars=self.task_vars, + ) + self.executions.append({ + NAME: "ZOS Operator Command - {0}".format(command), + RC: operator_response.get("rc"), + RETURN: operator_response, + }) + return operator_response + + def execute_cancel_shell_cmd(self, job_name, job_id): + # This is borrowed from the Ansible command/shell action plugins + # It's how they run commands from an action plugin on a remote + self._task.args = { + "_uses_shell": True, + "_raw_params": "jcan C {0} {1}".format(job_name, job_id), + } + command_action = self._shared_loader_obj.action_loader.get( + "ansible.legacy.command", + task=self._task, + connection=self._connection, + play_context=self._play_context, + loader=self._loader, + templar=self._templar, + shared_loader_obj=self._shared_loader_obj, + ) + cancel_response = 
command_action.run(task_vars=self.task_vars) + self.executions.append({ + NAME: "Cancel command - {0}({1})".format(job_name, job_id), + RC: cancel_response.get("rc"), + RETURN: cancel_response, + }) + return cancel_response + + +def get_datetime_now(): + return datetime.now() + + +def calculate_end_time(timeout_seconds: int) -> datetime: + now = get_datetime_now() + offset = timedelta(0, timeout_seconds) + return now + offset + + +def format_cancel_command(job_name, job_id): + return "jcan C {0} {1}".format(job_name, job_id) + + +def format_shutdown_command(job_name, stop_mode, sdtran=None, no_sdtran=None): + shutdown_command = DEFAULT_SHUTDOWN.format(job_name) + if stop_mode == IMMEDIATE: + shutdown_command = IMMEDIATE_SHUTDOWN.format(job_name) + + if sdtran: + return SDTRAN_COMMAND.format(shutdown_command, sdtran.upper()) + if no_sdtran: + return NO_SDTRAN_COMMAND.format(shutdown_command) + + return shutdown_command + + +def get_console_errors(shutdown_result): + shutdown_stdout = ( + "".join(shutdown_result.get("content", [])) + .replace(" ", "") + .replace("\n", "") + .upper() + ) + fail_pattern = r"CICSAUTOINSTALLFORCONSOLE[A-Z]{4}\d{4}HASFAILED" + ignore_pattern = r"CONSOLE[A-Z]{4}\d{4}HASNOTBEENDEFINEDTOCICS.INPUTISIGNORED" + + if re.search(fail_pattern, shutdown_stdout): + raise AnsibleActionFail( + "Shutdown command failed because the auto-install of the console was unsuccessful. See executions for full command output." + ) + if re.search(ignore_pattern, shutdown_stdout): + raise AnsibleActionFail( + "Shutdown command failed because the console used was not defined. See executions for full command output." 
+ ) + + +def _get_job_info_from_status(tso_query_response, job_name): + tso_response_content = tso_query_response["output"][0].get("content") + pattern = r"{0}".format(job_name) + job_strings = [ + line for line in tso_response_content if re.search(pattern, line)] + jobs = [] + for job in job_strings: + if ( + "JOB {0} NOT FOUND".format(job_name) in job.upper() + or "STATUS {0}".format(job_name) in job.upper() + ): + continue + jobs.append({ + JOB_NAME: job_name, + JOB_ID: job.split("(")[1].split(")")[0], + STATUS: job.split(")")[1].strip(), + }) + return jobs + + +def _get_job_name_from_query(job_query_response, job_id): + if job_query_response.get(FAILED): + raise AnsibleActionFail( + "Job query failed - {0}".format( + job_query_response.get("message", "(No failure message provided by zos_job_query)"))) + + jobs = job_query_response.get("jobs", []) + + if len(jobs) == 0: + raise AnsibleActionFail("No jobs found with id {0}".format(job_id)) + elif ( + len(jobs) == 1 + and jobs[0].get(JOB_NAME, "") == "*" + and jobs[0].get("ret_code", {}).get("msg", "") == "JOB NOT FOUND" + ): + raise AnsibleActionFail("No jobs found with id {0}".format(job_id)) + elif len(jobs) > 1: + raise AnsibleActionFail( + "Multiple jobs found with ID {0}".format(job_id)) + + return jobs[0].get(JOB_NAME) + + +def _get_job_status_name_id(tso_status_command_response, job_name, job_id): + if len(tso_status_command_response.get("output", [])) != 1: + raise AnsibleActionFail("Output not received for TSO STATUS command") + + tso_response_content = tso_status_command_response["output"][0].get( + "content") + pattern = r"{0}\({1}\)".format(job_name, job_id) + jobs = [line for line in tso_response_content if re.search(pattern, line)] + if len(jobs) == 0: + raise AnsibleActionFail( + "No jobs found with name {0} and ID {1}".format(job_name, job_id) + ) + if len(jobs) > 1: + raise AnsibleActionFail("Multiple jobs with name and ID found") + return jobs[0].split(")")[1].strip() diff --git 
a/plugins/action/td_intrapartition.py b/plugins/action/td_intrapartition.py new file mode 100644 index 00000000..88b93b02 --- /dev/null +++ b/plugins/action/td_intrapartition.py @@ -0,0 +1,19 @@ +# (c) Copyright IBM Corp. 2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from ansible_collections.ibm.ibm_zos_cics.plugins.plugin_utils._module_action_plugin import _DataSetActionPlugin + + +class ActionModule(_DataSetActionPlugin): + def run(self, tmp=None, task_vars=None): + return super(ActionModule, self)._run( + ds_name="dfhintra", + module_name="td_intrapartition", + cics_data_sets_required=False, + tmp=tmp, + task_vars=task_vars, + ) diff --git a/plugins/action/transaction_dump.py b/plugins/action/transaction_dump.py new file mode 100644 index 00000000..e9519a57 --- /dev/null +++ b/plugins/action/transaction_dump.py @@ -0,0 +1,24 @@ +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +from ansible_collections.ibm.ibm_zos_cics.plugins.plugin_utils._module_action_plugin import _DataSetActionPlugin + + +class ActionModule(_DataSetActionPlugin): + def run(self, tmp=None, task_vars=None): + module_args = self._task.args.copy() + + ds_name = "dfhdmpa" + if module_args.get("destination", "").upper().strip() == "B": + ds_name = "dfhdmpb" + + return super(ActionModule, self)._run( + ds_name=ds_name, + module_name="transaction_dump", + cics_data_sets_required=False, + tmp=tmp, + task_vars=task_vars, + ) diff --git a/plugins/doc_fragments/aux_temp_storage.py b/plugins/doc_fragments/aux_temp_storage.py new file mode 100644 index 00000000..acab9181 --- /dev/null +++ b/plugins/doc_fragments/aux_temp_storage.py @@ -0,0 +1,97 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 
2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + + +class ModuleDocFragment(object): + + DOCUMENTATION = r""" +options: + space_primary: + description: + - The size of the primary space allocated to the auxiliary temporary storage data set. + Note that this is just the value; the unit is specified with O(space_type). + - This option takes effect only when the auxiliary temporary storage data set is being created. + If the data set already exists, the option has no effect. + type: int + required: false + default: 200 + space_secondary: + description: + - The size of the secondary space allocated to the auxiliary temporary storage data set. + Note that this is just the value; the unit is specified with O(space_type). + - This option takes effect only when the auxiliary temporary storage data set is being created. + If the data set already exists, the option has no effect. + type: int + required: false + default: 10 + space_type: + description: + - The unit portion of the auxiliary temporary storage data set size. Note that this is + just the unit; the value for the primary space is specified with O(space_primary) and the value + for the secondary space is specified with O(space_secondary). + - This option takes effect only when the auxiliary temporary storage data set is being created. + If the data set already exists, the option has no effect. + - The size can be specified in megabytes (V(M)), kilobytes (V(K)), + records (V(REC)), cylinders (V(CYL)), or tracks (V(TRK)). + required: false + type: str + choices: + - M + - K + - REC + - CYL + - TRK + default: REC + volumes: + description: + - The volume(s) where the data set is created. Use a string to define a singular volume or a list of strings for multiple volumes. 
+ type: raw + required: false + region_data_sets: + description: + - The location of the region data sets to be created by using a template, for example, + C(REGIONS.ABCD0001.<< data_set_name >>). + - If you want to use a data set that already exists, ensure that the data set is an auxiliary temporary storage data set. + type: dict + required: true + suboptions: + template: + description: + - The base location of the region data sets with a template. + required: false + type: str + dfhtemp: + description: + - Overrides the templated location for the auxiliary temporary storage data set. + required: false + type: dict + suboptions: + dsn: + description: + - The data set name of the auxiliary temporary storage to override the template. + type: str + required: false + state: + description: + - The intended state for the auxiliary temporary storage data set, which the module aims to + achieve. + - Specify V(absent) to remove the auxiliary temporary storage data set entirely, if it + already exists. + - Specify V(initial) to create the auxiliary temporary storage data set, if it does not exist. + If the specified data set exists but is empty, the module leaves the data set as is. + If the specified data set exists and has contents, the module deletes the data set and then creates a new, empty data set. + - Specify V(warm) to retain an existing auxiliary temporary storage data set in its current state. + The module checks whether the specified data set exists, and if it does, leaves the data set as is. + If the data set does not exist, the operation fails. + choices: + - "initial" + - "absent" + - "warm" + required: true + type: str +""" diff --git a/plugins/doc_fragments/aux_trace.py b/plugins/doc_fragments/aux_trace.py new file mode 100644 index 00000000..e6acc275 --- /dev/null +++ b/plugins/doc_fragments/aux_trace.py @@ -0,0 +1,117 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 
2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + + +class ModuleDocFragment(object): + + DOCUMENTATION = r""" +options: + space_primary: + description: + - The size of the primary space allocated to the auxiliary trace data set. + Note that this is just the value; the unit is specified with O(space_type). + - This option takes effect only when the auxiliary trace data set is being created. + If the data set already exists, the option has no effect. + type: int + required: false + default: 20 + space_secondary: + description: + - The size of the secondary space allocated to the auxiliary trace data set. + Note that this is just the value; the unit is specified with O(space_type). + - This option takes effect only when the auxiliary trace data set is being created. + If the data set already exists, the option has no effect. + type: int + required: false + default: 4 + space_type: + description: + - The unit portion of the auxiliary trace data set size. Note that this is + just the unit; the value for the primary space is specified with O(space_primary) and + the value for the secondary space is specified with O(space_secondary). + - This option takes effect only when the auxiliary trace data set is being created. + If the data set already exists, the option has no effect. + - The size can be specified in megabytes (V(M)), kilobytes (V(K)), + cylinders (V(CYL)), or tracks (V(TRK)). + required: false + type: str + choices: + - M + - K + - CYL + - TRK + default: M + volumes: + description: + - The volume(s) where the data set is created. Use a string to define a singular volume or a list of strings for multiple volumes. + type: raw + required: false + region_data_sets: + description: + - The location of the region data sets to be created by using a template, for example, + C(REGIONS.ABCD0001.<< data_set_name >>). 
+ - If you want to use a data set that already exists, ensure that the data set is an auxiliary trace data set. + type: dict + required: true + suboptions: + template: + description: + - The base location of the region data sets with a template. + required: false + type: str + dfhauxt: + description: + - Overrides the templated location for the DFHAUXT data set. + required: false + type: dict + suboptions: + dsn: + description: + - The data set name of DFHAUXT to override the template. + type: str + required: false + dfhbuxt: + description: + - Overrides the templated location for the DFHBUXT data set. + required: false + type: dict + suboptions: + dsn: + description: + - The data set name of DFHBUXT to override the template. + type: str + required: false + destination: + description: + - Identify which one of the auxiliary trace data sets is the target of the operation. + If the value is left blank, A is implied, but you can specify A or B. + - Specify V(A) to create or delete the A data set. + - Specify V(B) to create or delete the B data set. This MUST be set for the creation of the B data set. + choices: + - "A" + - "B" + type: str + required: false + default: "A" + state: + description: + - The intended state for the auxiliary trace data set, which the module aims to achieve. + - Specify V(absent) to remove the auxiliary trace data set data set entirely, if it exists. + - Specify V(initial) to create the auxiliary trace data set if it does not exist. + If the specified data set exists but is empty, the module leaves the data set as is. + If the specified data set exists and has contents, the module deletes the data set and then creates a new, empty one. + - Specify V(warm) to retain an existing auxiliary trace data set in its current state. + The module checks whether the specified data set exists, and if it does, leaves the data set as is. + If the data set does not exist, the operation fails. 
+ choices: + - "initial" + - "absent" + - "warm" + required: true + type: str + """ diff --git a/plugins/doc_fragments/cmci.py b/plugins/doc_fragments/cmci.py index 938d7e97..c9139317 100644 --- a/plugins/doc_fragments/cmci.py +++ b/plugins/doc_fragments/cmci.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# (c) Copyright IBM Corp. 2020,2021 +# (c) Copyright IBM Corp. 2020,2023 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) from __future__ import (absolute_import, division, print_function) diff --git a/plugins/doc_fragments/csd.py b/plugins/doc_fragments/csd.py new file mode 100644 index 00000000..49e4a4db --- /dev/null +++ b/plugins/doc_fragments/csd.py @@ -0,0 +1,161 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + + +class ModuleDocFragment(object): + + DOCUMENTATION = r""" +options: + space_primary: + description: + - The size of the primary space allocated to the CSD. + Note that this is just the value; the unit is specified with O(space_type). + - This option takes effect only when the CSD is being created. + If the CSD already exists, the option has no effect. + type: int + required: false + default: 4 + space_secondary: + description: + - The size of the secondary space allocated to the CSD. + Note that this is just the value; the unit is specified with O(space_type). + - This option takes effect only when the CSD is being created. + If the CSD already exists, the option has no effect. + type: int + required: false + default: 1 + space_type: + description: + - The unit portion of the CSD size. Note that this is + just the unit; the value for the primary space is specified with O(space_primary) + and the value for the secondary space is specified with O(space_secondary). + - This option takes effect only when the CSD is being created. 
+ If the CSD already exists, the option has no effect. + - The size can be specified in megabytes (V(M)), kilobytes (V(K)), + records (V(REC)), cylinders (V(CYL)), or tracks (V(TRK)). + required: false + type: str + choices: + - M + - K + - REC + - CYL + - TRK + default: M + volumes: + description: + - The volume(s) where the data set is created. Use a string to define a singular volume or a list of strings for multiple volumes. + type: raw + required: false + region_data_sets: + description: + - The location of the region data sets to be created by using a template, for example, + C(REGIONS.ABCD0001.<< data_set_name >>). + type: dict + required: true + suboptions: + template: + description: + - The base location of the region data sets with a template. + required: false + type: str + dfhcsd: + description: + - Overrides the templated location for the CSD. + required: false + type: dict + suboptions: + dsn: + description: + - The data set name of the CSD to override the template. + type: str + required: false + cics_data_sets: + description: + - The name of the C(SDFHLOAD) library of the CICS installation, for example, C(CICSTS61.CICS.SDFHLOAD). + type: dict + required: true + suboptions: + template: + description: + - The templated location of the C(SDFHLOAD) library. + required: false + type: str + sdfhload: + description: + - The location of the C(SDFHLOAD) library. If O(cics_data_sets.template) is provided, this value overrides the template. + type: str + required: false + state: + description: + - The intended state for the CSD, which the module aims to achieve. + - Specify V(absent) to remove the CSD entirely, if it already exists. + - Specify V(initial) to create the CSD if it does not already exist, and initialize it by using DFHCSDUP. + - Specify V(warm) to retain an existing CSD in its current state. + The module verifies whether the specified data set exists and whether it contains any records. 
+ If both conditions are met, the module leaves the data set as is. + If the data set does not exist or if it is empty, the operation fails. + - Specify V(changed) to run a DFHCSDUP script to update an existing CSD. + choices: + - "initial" + - "absent" + - "warm" + - "changed" + required: true + type: str + input_location: + description: + - The type of location from which to load the DFHCSDUP script. + - Specify V(DATA_SET) to load from a PDS, PDSE, or sequential data set. + - Specify V(USS) to load from a file on UNIX System Services (USS). + - Specify V(LOCAL) to load from a file local to the Ansible control node. + - Specify V(INLINE) to allow a script to be passed directly through the O(input_content) parameter. + choices: + - "DATA_SET" + - "USS" + - "LOCAL" + - "INLINE" + type: str + required: false + default: "DATA_SET" + input_src: + description: + - The path to the source file that contains the DFHCSDUP script to submit. + - 'It can be a data set. For example: "TESTER.DEFS.SCRIPT" or "TESTER.DEFS(SCRIPT)"' + - 'It can be a USS file. For example: "/u/tester/defs/script.csdup"' + - 'It can be a local file. For example: "/User/tester/defs/script.csdup"' + type: str + required: false + input_content: + description: + - The content of the DFHCSDUP script to submit, if you are using the O(input_location=INLINE) option. + type: str + required: false + log: + description: + - Specify the recovery attribute for the CSD, overriding the CSD system initialization parameters. + - Specify NONE for a nonrecoverable CSD. + - Specify UNDO for a CSD that is limited to file backout only. + - Specify ALL for a CSD for which you want both forward recovery and file backout. If you specify O(log=ALL), you + must also specify LOGSTREAMID to identify the 26-character name of the z/OS™ log stream to be used as the + forward recovery log. 
The CICS collection does not support defining forward recovery log streams; you + must follow the instructions in L(Defining forward recovery log streams, + https://www.ibm.com/docs/en/cics-ts/latest?topic=journaling-defining-forward-recovery-log-streams). + choices: + - "NONE" + - "UNDO" + - "ALL" + required: false + type: str + logstream_id: + description: + - The 26-character name of the z/OS™ log stream to be used as the forward recovery log. + - This is required when you use O(log=ALL). + type: str + required: false + """ diff --git a/plugins/doc_fragments/global_catalog.py b/plugins/doc_fragments/global_catalog.py new file mode 100644 index 00000000..681227b8 --- /dev/null +++ b/plugins/doc_fragments/global_catalog.py @@ -0,0 +1,114 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + + +class ModuleDocFragment(object): + + DOCUMENTATION = r""" +options: + space_primary: + description: + - The size of the primary space allocated to the global catalog data set. + Note that this is just the value; the unit is specified with O(space_type). + - This option takes effect only when the global catalog data set is being created. + If the global catalog data set already exists, the option has no effect. + type: int + required: false + default: 5 + space_secondary: + description: + - The size of the secondary space allocated to the global catalog data set. + Note that this is just the value; the unit is specified with O(space_type). + - This option takes effect only when the global catalog data set is being created. + If the global catalog data set already exists, the option has no effect. + type: int + required: false + default: 1 + space_type: + description: + - The unit portion of the global catalog data set size. 
Note that this is + just the unit; the value for the primary space is specified with O(space_primary) and + the value for the secondary space is specified with O(space_secondary). + - This option takes effect only when the global catalog data set is being created. + If the global catalog data set already exists, the option has no effect. + - The size can be specified in megabytes (V(M)), kilobytes (V(K)), + records (V(REC)), cylinders (V(CYL)), or tracks (V(TRK)). + required: false + type: str + choices: + - M + - K + - REC + - CYL + - TRK + default: M + volumes: + description: + - The volume(s) where the data set is created. Use a string to define a singular volume or a list of strings for multiple volumes. + type: raw + required: false + region_data_sets: + description: + - The location of the region data sets to be created by using a template, for example, + C(REGIONS.ABCD0001.<< data_set_name >>). + - If you want to use a data set that already exists, ensure that the data set is a global catalog data set. + type: dict + required: true + suboptions: + template: + description: + - The base location of the region data sets with a template. + required: false + type: str + dfhgcd: + description: + - Overrides the templated location for the global catalog data set. + required: false + type: dict + suboptions: + dsn: + description: + - The data set name of the global catalog to override the template. + type: str + required: false + cics_data_sets: + description: + - The name of the C(SDFHLOAD) library of the CICS installation, for example, C(CICSTS61.CICS.SDFHLOAD). + - This module uses the C(DFHRMUTL) utility internally, which is found in the C(SDFHLOAD) library. + type: dict + required: true + suboptions: + template: + description: + - The templated location of the C(SDFHLOAD) library. + required: false + type: str + sdfhload: + description: + - The location of the C(SDFHLOAD) library. If O(cics_data_sets.template) is provided, this value overrides the template. 
+ type: str + required: false + state: + description: + - The intended state for the global catalog data set, which the module aims to achieve. + - Specify V(absent) to remove the global catalog data set entirely, if it exists. + - Specify V(initial) to set the autostart override record to C(AUTOINIT). + If the specified global catalog data set does not already exist, the module creates the data set. + - Specify V(cold) to set the autostart override record of an existing global catalog to C(AUTOCOLD). + If the specified global catalog data set does not already exist, the operation fails. + - Specify V(warm) to set the autostart override record of an existing global catalog to C(AUTOASIS), + undoing any previous setting of C(AUTOINIT) or C(AUTOCOLD). The module verifies whether the specified + data set exists and whether it contains any records. If either condition is not met, the operation fails. + choices: + - "absent" + - "initial" + - "cold" + - "warm" + required: true + type: str + """ diff --git a/plugins/doc_fragments/local_catalog.py b/plugins/doc_fragments/local_catalog.py new file mode 100644 index 00000000..6a5fa400 --- /dev/null +++ b/plugins/doc_fragments/local_catalog.py @@ -0,0 +1,112 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + + +class ModuleDocFragment(object): + + DOCUMENTATION = r""" +options: + space_primary: + description: + - The size of the primary space allocated to the local catalog data set. + Note that this is just the value; the unit is specified with O(space_type). + - This option takes effect only when the local catalog data set is being created. + If the local catalog data set already exists, the option has no effect. 
+ type: int + required: false + default: 200 + space_secondary: + description: + - The size of the secondary space allocated to the local catalog data set. + Note that this is just the value; the unit is specified with O(space_type). + - This option takes effect only when the local catalog data set is being created. + If the local catalog data set already exists, the option has no effect. + type: int + required: false + default: 5 + space_type: + description: + - The unit portion of the local catalog data set size. Note that this is + just the unit; the value for the primary space is specified with O(space_primary) and + the value for the secondary space is specified with O(space_secondary). + - This option takes effect only when the local catalog data set is being created. + If the local catalog data set already exists, the option has no effect. + - The size can be specified in megabytes (V(M)), kilobytes (V(K)), + records (V(REC)), cylinders (V(CYL)), or tracks (V(TRK)). + required: false + type: str + choices: + - M + - K + - REC + - CYL + - TRK + default: REC + volumes: + description: + - The volume(s) where the data set is created. Use a string to define a singular volume or a list of strings for multiple volumes. + type: raw + required: false + region_data_sets: + description: + - The location of the region data sets to be created by using a template, for example, + C(REGIONS.ABCD0001.<< data_set_name >>). + - If you want to use a data set that already exists, ensure that the data set is a local catalog data set. + type: dict + required: true + suboptions: + template: + description: + - The base location of the region data sets with a template. + required: false + type: str + dfhlcd: + description: + - Overrides the templated location for the local catalog data set. + required: false + type: dict + suboptions: + dsn: + description: + - The data set name of the local catalog to override the template. 
+ type: str + required: false + cics_data_sets: + description: + - The name of the C(SDFHLOAD) library of the CICS installation, for example, C(CICSTS61.CICS.SDFHLOAD). + - This module uses the C(DFHCCUTL) utility internally, which is found in the C(SDFHLOAD) library. + type: dict + required: true + suboptions: + template: + description: + - The templated location of the C(SDFHLOAD) library. + required: false + type: str + sdfhload: + description: + - The location of the C(SDFHLOAD) library. If O(cics_data_sets.template) is provided, this value overrides the template. + type: str + required: false + state: + description: + - The intended state for the local catalog, which the module aims to achieve. + - Specify V(absent) to remove the local catalog data set entirely, if it already exists. + - Specify V(initial) to create the local catalog data set if it does not exist, + or empty this existing local catalog of all records. + - Specify V(warm) to retain an existing local catalog in its current state. + The module verifies whether the specified data set exists and whether it contains any records. + If both conditions are met, the module leaves the data set as is. + If the data set does not exist or if it is empty, the operation fails. + choices: + - "initial" + - "absent" + - "warm" + required: true + type: str + """ diff --git a/plugins/doc_fragments/local_request_queue.py b/plugins/doc_fragments/local_request_queue.py new file mode 100644 index 00000000..13bd89ef --- /dev/null +++ b/plugins/doc_fragments/local_request_queue.py @@ -0,0 +1,94 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + + +class ModuleDocFragment(object): + + DOCUMENTATION = r""" +options: + space_primary: + description: + - The size of the primary space allocated to the local request queue data set. 
+ Note that this is just the value; the unit is specified with O(space_type). + - This option takes effect when the local request queue data set is being created. + If the data set already exists, the option has no effect. + type: int + required: false + default: 4 + space_secondary: + description: + - The size of the secondary space allocated to the local request queue data set. + Note that this is just the value; the unit is specified with O(space_type). + - This option takes effect when the local request queue data set is being created. + If the data set already exists, the option has no effect. + type: int + required: false + default: 1 + space_type: + description: + - The unit portion of the local request queue data set size. Note that this is + just the unit; the value for the primary space is specified with O(space_primary) and + the value for the secondary space is specified with O(space_secondary). + - This option takes effect only when the local request queue data set is being created. + If the data set already exists, the option has no effect. + - The size can be specified in megabytes (V(M)), kilobytes (V(K)), + records (V(REC)), cylinders (V(CYL)), or tracks (V(TRK)). + required: false + type: str + choices: + - M + - K + - REC + - CYL + - TRK + default: M + volumes: + description: + - The volume(s) where the data set is created. Use a string to define a singular volume or a list of strings for multiple volumes. + type: raw + required: false + region_data_sets: + description: + - The location of the region data sets to be created by using a template, for example, + C(REGIONS.ABCD0001.<< data_set_name >>). + - If you want to use a data set that already exists, ensure that the data set is a local request queue data set. + type: dict + required: true + suboptions: + template: + description: + - The base location of the region data sets with a template. 
+ required: false + type: str + dfhlrq: + description: + - Overrides the templated location for the local request queue data set. + required: false + type: dict + suboptions: + dsn: + description: + - The data set name of the local request queue to override the template. + type: str + required: false + state: + description: + - The intended state for the local request queue, which the module aims to achieve. + - Specify V(absent) to remove the local request queue data set entirely, if it exists. + - Specify V(initial) to create the local request queue data set if it does not exist, + or empty this existing local request queue of all records. + - Specify V(warm) to retain an existing local request queue data set in its current state. + The module checks whether the specified data set exists, and if it does, leaves the data set as is. + If the data set does not exist, the operation fails. + choices: + - "initial" + - "absent" + - "warm" + required: true + type: str + """ diff --git a/plugins/doc_fragments/region_jcl.py b/plugins/doc_fragments/region_jcl.py new file mode 100644 index 00000000..203285cc --- /dev/null +++ b/plugins/doc_fragments/region_jcl.py @@ -0,0 +1,2649 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + + +class ModuleDocFragment(object): + + DOCUMENTATION = r""" +options: + space_primary: + description: + - The size of the primary space allocated to the CICS startup JCL data set. + Note that this is just the value; the unit is specified with O(space_type). + - This option takes effect only when the CICS startup JCL data set is being created. + If the CICS startup JCL data set already exists, the option has no effect. + - If this option is not set, the primary space is dynamically calculated based on the + size of the generated CICS startup JCL. 
+ - If the target data set is a member in a PDS or PDSE, then this value does not have any effect. + type: int + required: false + space_secondary: + description: + - The size of the secondary space allocated to the CICS startup JCL data set. + Note that this is just the value; the unit is specified with O(space_type). + - This option takes effect only when the CICS startup JCL data set is being created. + If the CICS startup JCL data set already exists, the option has no effect. + - If this option is not set, the secondary space is dynamically calculated as 10% of + the total size of the generated CICS startup JCL. + - If the target data set is a member in a PDS or PDSE, then this value does not have any effect. + type: int + required: false + space_type: + description: + - The unit portion of the CICS startup JCL data set size. Note that this is + just the unit; the value for the primary space is specified with O(space_primary) + and the value for the secondary space is specified with O(space_secondary). + - This option takes effect only when the CICS startup JCL data set is being created. + If the CICS startup JCL data set already exists, the option has no effect. + - The size can be specified in megabytes (V(M)), kilobytes (V(K)), + cylinders (V(CYL)), or tracks (V(TRK)). + - If neither O(space_secondary) nor O(space_primary) is set, then this value does + not have any effect. + - If the target data set is a member in a PDS or PDSE, then this value does not have any effect. + required: false + type: str + choices: + - M + - K + - CYL + - TRK + default: M + volumes: + description: + - The volume(s) where the data set is created. Use a string to define a singular volume or a list of strings for multiple volumes. + - If the target data set is a member in a PDS or PDSE, then this value does not have any effect. + type: raw + required: false + state: + description: + - The intended state for the CICS startup JCL data set, which the module aims to achieve. 
+ - Specify V(absent) to remove the CICS startup JCL data set entirely, if it already exists. + - Specify V(initial) to create the CICS startup JCL data set if it does not already exist. + - Specify V(warm) to retain an existing CICS startup JCL data set in its current state. + The module verifies whether the specified data set exists and whether it matches the + generated startup JCL. + If both conditions are met, the module leaves the data set as is. + If the data set does not exist or does not match, the operation fails. + choices: + - "initial" + - "absent" + - "warm" + required: true + type: str + job_parameters: + description: + - Specifies various parameters to be applied to the CICS startup job. + type: dict + required: false + suboptions: + accounting_information: + description: + - Allows jobs to be grouped into a class. + type: dict + required: false + suboptions: + pano: + description: + - Specifies the programmer's accounting number. The value is 1 through 4 alphanumeric characters. + type: str + required: false + room: + description: + - Specifies the programmer's room number. The value is 1 through 4 alphanumeric characters. + type: str + required: false + time: + description: + - Specifies the estimated execution time in minutes. The value is 1 through 4 decimal numbers. For example, + code 30 for 30 minutes. If you omit a time subparameter and a TIME parameter on the JES2 /*JOBPARM + statement, JES2 uses an installation default specified at initialization. If job execution exceeds the + time, JES2 sends a message to the operator. + type: int + required: false + lines: + description: + - Specifies the estimated line count, in thousands of lines, from this job's sysout data sets. The value is 1 + through 4 decimal numbers. For example, code 5 for 5000 lines. If you omit lines, JES2 uses an + installation default specified at initialization. 
+ type: int + required: false + cards: + description: + - Specifies the estimated number of cards JES2 is to punch from this job's sysout data sets. The value is 1 + through 4 decimal numbers. If you omit cards, JES2 uses an installation default specified at + initialization. + type: int + required: false + forms: + description: + - Specifies the forms that JES2 is to use for printing this job's sysout data sets. The value is 1 through 4 + alphanumeric characters. For example, code 5 for 5-part forms. If you omit forms, JES2 uses an + installation default specified at initialization. + type: str + required: false + copies: + description: + - Specifies the number of times JES2 is to print or punch this job's sysout data sets. The value is 1 + through 3 decimal numbers and must not exceed an installation-specified limit. The maximum is 255. For + example, code 2 for two copies. If you omit copies, JES2 assumes one copy. + type: int + required: false + log: + description: + - Specifies whether JES2 is to print the job log. Code N to suppress printing of the job log. If you code any + other character or omit this subparameter, JES2 prints the job log. If your installation specified NOLOG + for this job's class during JES2 initialization, JES2 does not print the job log. + type: str + required: false + linect: + description: + - Specifies the number of lines JES2 is to print per page for this job's sysout data sets. The value is 1 + through 3 decimal numbers. If you omit linect, JES2 uses an installation default specified at + initialization. If you code a zero, JES2 does not eject to a new page when the number of lines exceeds + the installation default. + type: int + required: false + class: + description: + - Allows jobs to be grouped into a class. + type: str + required: false + job_name: + description: + - The name of the CICS startup job. The default value is C(APPLID). 
+ type: str + required: false + memlimit: + description: + - Use the MEMLIMIT parameter to specify the limit on the total size of usable 64-bit z/OS storage in a + single address space. + type: str + required: false + msglevel: + description: + - Use the MSGLEVEL parameter to control the listing of the JCL output for the job. + type: dict + required: false + suboptions: + statements: + description: + - Indicates which job control statements the system is to print in the statement images portion of the JCL + output. + type: int + choices: + - "0" + - "1" + - "2" + required: false + messages: + description: + - Indicates which messages the system is to print in the system messages portion of the JCL output. + type: int + choices: + - "0" + - "1" + required: false + msgclass: + description: + - Use the MSGCLASS parameter to assign the job log to an output class. The job log is a record of + job-related information for the programmer. + type: str + required: false + programmer_name: + description: + - Use the programmer's name parameter to identify the person or group responsible for a job. + type: str + required: false + region: + description: + - Use the REGION parameter to specify the amount of central or virtual storage that the job requires. The + system applies the value that you code on REGION to each step of the job. + type: str + required: false + user: + description: + - Code the USER parameter to identify to the system the person submitting the job. The user ID is used by + RACF®, the system resources manager (SRM), and other system components. + type: str + required: false + applid: + description: + - The name of your z/OS Communications Server application identifier for this CICS region. + type: str + required: true + cics_data_sets: + description: + - The data set names of the C(SDFHAUTH), C(SDFHLOAD) and C(SDFHLIC) libraries, for example, + C(CICSTS61.CICS.SDFHAUTH) and C(CICSTS61.CICS.SDFHLOAD). 
+ type: dict + required: true + suboptions: + template: + description: + - The templated location of the libraries. + type: str + required: false + sdfhauth: + description: + - The location of the C(SDFHAUTH) library to override the template. + type: str + required: false + sdfhload: + description: + - The location of the C(SDFHLOAD) library. If O(cics_data_sets.template) is provided, this value overrides the template. + type: str + required: false + sdfhlic: + description: + - The location of the C(SDFHLIC) library. If O(cics_data_sets.template) is provided, this value overrides the template. + type: str + required: false + le_data_sets: + description: + - The data set names of the C(SCEECICS), C(SCEERUN) and C(SCEERUN2) libraries. + type: dict + required: true + suboptions: + template: + description: + - The templated location of the Language Environment runtime libraries. + required: false + type: str + sceecics: + description: + - The location of the C(SCEECICS) library. If O(le_data_sets.template) is provided, this value overrides the template. + type: str + required: false + sceerun: + description: + - The location of the C(SCEERUN) library. If O(le_data_sets.template) is provided, this value overrides the template. + type: str + required: false + sceerun2: + description: + - The location of the C(SCEERUN2) library. If O(le_data_sets.template) is provided, this value overrides the template. + type: str + required: false + cpsm_data_sets: + description: + - The data set names of the C(SEYUAUTH) and C(SEYULOAD) libraries, for example, C(CTS610.CPSM610.SEYUAUTH). + type: dict + required: false + suboptions: + template: + description: + - The templated location of the CICSPlex SM libraries. + required: false + type: str + seyuauth: + description: + - The location of the C(SEYUAUTH) library. If O(cpsm_data_sets.template) is provided, this value overrides the template. 
+ required: false + type: str + seyuload: + description: + - The location of the C(SEYULOAD) library. If O(cpsm_data_sets.template) is provided, this value overrides the template. + required: false + type: str + steplib: + description: + - Any locations of additional data sets other than C(SDFHAUTH), C(SDFHLIC), C(SCEERUN), or C(SCEERUN2), to be added to the STEPLIB concatenation. + The STEPLIB concatenation is where you specify the libraries that contain the modules loaded by the z/OS operating system. + You can either add data sets at the very top of the list or append them to the bottom of the list. There are other data sets in between, + as determined by the defaults or other input parameters; for example, C(SEYUAUTH) and C(SEYULOAD) as specified with O(cpsm_data_sets), + C(SCEERUN) and C(SCEERUN2) as specified with O(le_data_sets), C(SDFHAUTH) and C(SDFHLIC) as specified with O(cics_data_sets), and so on. + type: dict + required: false + suboptions: + top_data_sets: + description: + - The C(STEPLIB) data sets to be added to the very top of the list. + type: list + required: false + elements: str + data_sets: + description: + - The C(STEPLIB) data sets to be added to the bottom of the list. + type: list + required: false + elements: str + dfhrpl: + description: + - Any locations of additional data sets other than C(SDFHLOAD), C(SCEECICS), C(SCEERUN), or C(SCEERUN2), to be added to the DFHRPL concatenation. + The DFHRPL concatenation is where you specify the libraries that contain modules loaded by CICS, for example, the libraries + containing your CICS application programs, your CICS control tables, and so on. You can either add data sets at the very top of the + list or append them to the bottom of the list. There are other data sets in between, as determined by the defaults or other input + parameters; for example, C(SCEERUN) and C(SCEERUN2) as specified with O(le_data_sets), C(SDFHLOAD) as specified with O(cics_data_sets), and so on. 
+ type: dict + required: false + suboptions: + top_data_sets: + description: + - The C(DFHRPL) data sets to be added to the very top of the list. + required: false + type: list + elements: str + data_sets: + description: + - The C(DFHRPL) data sets to be added to the bottom of the list. + type: list + required: false + elements: str + region_data_sets: + description: + - The location of the region data sets, for example, C(REGIONS.ABCD01.DFHAUXT), C(REGIONS.ABCD01.DFHCSD) and + C(REGIONS.ABCD01.DFHGCD). + type: dict + required: true + suboptions: + template: + description: + - The base location of the region data sets to be created by using a template, for example, + C(REGIONS.ABCD0001.<< data_set_name >>). This is not required if you provide the data set + name (dsn) of all the data sets individually. + required: false + type: str + dfhauxt: + description: + - Overrides the templated location for the auxiliary trace A data set. + required: false + type: dict + suboptions: + dsn: + description: + - The name of the auxiliary trace A data set to override the template. + type: str + required: false + dfhbuxt: + description: + - Overrides the templated location for the auxiliary trace B data set. + required: false + type: dict + suboptions: + dsn: + description: + - The name of the auxiliary trace B data set to override the template. + type: str + required: false + dfhcsd: + description: + - Overrides the templated location for the CSD. + required: false + type: dict + suboptions: + dsn: + description: + - The name of the CSD to override the template. + type: str + required: false + dfhdmpa: + description: + - Overrides the templated location for the dump A data set. + required: false + type: dict + suboptions: + dsn: + description: + - The name of the dump A data set to override the template. + type: str + required: false + dfhdmpb: + description: + - Overrides the templated location for the dump B data set. 
+ required: false + type: dict + suboptions: + dsn: + description: + - The name of the dump B data set to override the template. + type: str + required: false + dfhlrq: + description: + - Overrides the templated location for the local request queue data set. + required: false + type: dict + suboptions: + dsn: + description: + - The data set name of the local request queue to override the template. + type: str + required: false + dfhgcd: + description: + - Overrides the templated location for the global catalog data set. + required: false + type: dict + suboptions: + dsn: + description: + - The data set name of the global catalog to override the template. + type: str + required: false + dfhlcd: + description: + - Overrides the templated location for the local catalog data set. + required: false + type: dict + suboptions: + dsn: + description: + - The data set name of the local catalog to override the template. + type: str + required: false + dfhintra: + description: + - Overrides the templated location for the intrapartition data set. + required: false + type: dict + suboptions: + dsn: + description: + - The name of the intrapartition data set to override the template. + type: str + required: false + dfhtemp: + description: + - Overrides the templated location for the temporary storage data set. + required: false + type: dict + suboptions: + dsn: + description: + - The data set name of the temporary storage to override the template. + type: str + required: false + dfhstart: + description: + - Overrides the templated location for the CICS startup JCL data set. + required: false + type: dict + suboptions: + dsn: + description: + - The data set name of the CICS startup JCL data set to override the template. + - The data set name can also be set to a member of an existing PDS or PDSE. + type: str + required: false + output_data_sets: + description: + - The system output data sets such as C(CEEMSG) and C(SYSPRINT), as well as the destination class of the output. 
+ type: dict + required: false + suboptions: + default_sysout_class: + description: + - The class to be applied as the default for all of the output data sets. If it isn't provided and if no + overrides are specified for an individual output data set, * is applied. + type: str + required: false + ceemsg: + description: + - Overrides the default class to use a custom class for the C(CEEMSG) data set. Alternatively, omit the + C(CEEMSG) data set from being added to the job. + type: dict + required: false + suboptions: + sysout: + description: + - Specify the output class to assign the C(CEEMSG) data set to. + type: str + required: false + omit: + description: + - Specifies whether C(CEEMSG) should be excluded from being added to the list of sysout data sets. + type: bool + required: false + ceeout: + description: + - Overrides the default class to use a custom class for the C(CEEOUT) data set. Alternatively, omit the + C(CEEOUT) data set from being added to the job. + type: dict + required: false + suboptions: + sysout: + description: + - Specify the output class to assign the C(CEEOUT) data set to. + type: str + required: false + omit: + description: + - Specifies whether C(CEEOUT) should be excluded from being added to the list of sysout data sets. + type: bool + required: false + msgusr: + description: + - Overrides the default class to use a custom class for the C(MSGUSR) data set. Alternatively, omit the + C(MSGUSR) data set from being added to the job. + type: dict + required: false + suboptions: + sysout: + description: + - Specify the output class to assign the C(MSGUSR) data set to. + type: str + required: false + omit: + description: + - Specifies whether C(MSGUSR) should be excluded from being added to the list of sysout data sets. + type: bool + required: false + sysprint: + description: + - Overrides the default class to use a custom class for the C(SYSPRINT) data set. Alternatively, omit the + C(SYSPRINT) data set from being added to the job. 
+ type: dict + required: false + suboptions: + sysout: + description: + - Specify the output class to assign the C(SYSPRINT) data set to. + type: str + required: false + omit: + description: + - Specifies whether C(SYSPRINT) should be excluded from being added to the list of sysout data sets. + required: false + type: bool + sysudump: + description: + - Overrides the default class to use a custom class for the C(SYSUDUMP) data set. Alternatively, omit the + C(SYSUDUMP) data set from being added to the job. + type: dict + required: false + suboptions: + sysout: + description: + - Specify the output class to assign the C(SYSUDUMP) data set to. + type: str + required: false + omit: + description: + - Specifies whether C(SYSUDUMP) should be excluded from being added to the list of sysout data sets. + required: false + type: bool + sysabend: + description: + - Overrides the default class to use a custom class for the C(SYSABEND) data set. Alternatively, omit the + C(SYSABEND) data set from being added to the job. + type: dict + required: false + suboptions: + sysout: + description: + - Specify the output class to assign the C(SYSABEND) data set to. + type: str + required: false + omit: + description: + - Specifies whether C(SYSABEND) should be excluded from being added to the list of sysout data sets. + type: bool + required: false + sysout: + description: + - Overrides the default class to use a custom class for the C(SYSOUT) data set. Alternatively, omit the + C(SYSOUT) data set from being added to the job. + type: dict + required: false + suboptions: + sysout: + description: + - Specify the output class to assign the C(SYSOUT) data set to. + type: str + required: false + omit: + description: + - Specifies whether C(SYSOUT) should be excluded from being added to the list of sysout data sets. + type: bool + required: false + dfhcxrf: + description: + - Overrides the default class to use a custom class for the C(DFHCXRF) data set. 
Alternatively, omit the + C(DFHCXRF) data set from being added to the job. + type: dict + required: false + suboptions: + sysout: + description: + - Specify the output class to assign the C(DFHCXRF) data set to. + type: str + required: false + omit: + description: + - Specifies whether C(DFHCXRF) should be excluded from being added to the list of sysout data sets. + type: bool + required: false + logusr: + description: + - Overrides the default class to use a custom class for the C(LOGUSR) data set. Alternatively, omit the + C(LOGUSR) data set from being added to the job. + type: dict + required: false + suboptions: + sysout: + description: + - Specify the output class to assign the C(LOGUSR) data set to. + type: str + required: false + omit: + description: + - Specifies whether C(LOGUSR) should be excluded from being added to the list of sysout data sets. + type: bool + required: false + sit_parameters: + description: + - Define the system initialization parameters for the CICS region. + type: dict + required: false + suboptions: + adi: + description: + - The ADI parameter specifies the alternate delay interval in seconds for an alternate CICS® region when you + are running CICS with XRF. + required: false + type: int + aibridge: + description: + - The AIBRIDGE parameter specifies whether the autoinstall user replaceable module (URM) is to be called + when creating bridge facilities (virtual terminals) used by the 3270 bridge mechanism. + - Specify this parameter only in the bridge router region. + required: false + type: str + choices: + - AUTO + - "YES" + aicons: + description: + - The AICONS parameter specifies whether you want autoinstall support for consoles. 
+ required: false + type: str + choices: + - "NO" + - AUTO + - "YES" + aiexit: + description: + - The AIEXIT parameter specifies the name of the autoinstall user-replaceable program that you want CICS® to + use when autoinstalling local z/OS® Communications Server terminals, APPC connections, virtual terminals, + and shipped terminals and connections. + required: false + type: str + aildelay: + description: + - The AILDELAY parameter specifies the delay period that elapses after all sessions between CICS® and an + autoinstalled terminal, APPC device, or APPC system are ended, before the terminal or connection entry is + deleted. + required: false + type: int + aiqmax: + description: + - The AIQMAX parameter specifies the maximum number of z/OS® Communications Server terminals and APPC + connections that can be queued concurrently for autoinstall, the limit is the sum of installs and deletes. + required: false + type: int + airdelay: + description: + - The AIRDELAY parameter specifies the delay period that elapses after an emergency restart before + autoinstalled terminal and APPC connection entries that are not in session are deleted. + required: false + type: int + akpfreq: + description: + - The AKPFREQ parameter specifies the number of write requests to the CICS® system log stream output buffer + required before CICS writes an activity keypoint. + required: false + type: int + autconn: + description: + - The AUTCONN parameter specifies that the reconnection of terminals after an XRF takeover is to be delayed, + to allow time for manual switching. + required: false + type: int + autodst: + description: + - The AUTODST parameter specifies whether CICS is to activate automatic dynamic storage tuning for + application programs. + required: false + type: str + choices: + - "NO" + - "YES" + autoresettime: + description: + - The AUTORESETTIME parameter specifies the action CICS takes for automatic time changes. 
+ required: false + type: str + choices: + - IMMEDIATE + - "NO" + - "YES" + auxtr: + description: + - The AUXTR parameter specifies whether the auxiliary trace destination is to be activated at system + initialization. + required: false + type: str + choices: + - "OFF" + - "ON" + auxtrsw: + description: + - The AUXTRSW parameter specifies whether you want the auxiliary trace autoswitch facility. + required: false + type: str + choices: + - "NO" + - NEXT + - ALL + bms: + description: + - The BMS system initialization parameter specifies which version of basic mapping support you require in + CICS. + required: false + type: str + brmaxkeeptime: + description: + - The BRMAXKEEPTIME parameter specifies the maximum time (in seconds) that bridge facilities (virtual + terminals used by the 3270 bridge) are kept if they are not used. + required: false + type: int + cdsasze: + description: + - The CDSASZE system initialization parameter specifies the size of the CDSA. + required: false + type: int + certexpirywarn: + description: + - The CERTEXPIRYWARN parameter specifies whether CICS® warns about expiring certificates, and if so, how many days ahead of the expiry. + required: false + type: str + chkstrm: + description: + - The CHKSTRM parameter specifies that terminal storage-violation checking is to be activated or + deactivated. + required: false + type: str + choices: + - CURRENT + - NONE + chkstsk: + description: + - The CHKSTSK parameter specifies that task storage-violation checking at startup is to be activated or + deactivated. + required: false + type: str + choices: + - CURRENT + - NONE + cicssvc: + description: + - The CICSSVC parameter specifies the number that you have assigned to the CICS type 3 SVC. + required: false + type: int + cilock: + description: + - The CILOCK parameter specifies whether or not the control interval lock of a non-RLS VSAM file is to be + kept after a successful read-for-update request. 
+ required: false + type: str + choices: + - "NO" + - "YES" + clintcp: + description: + - The CLINTCP parameter specifies the default client code page to be used by the DFHCNV data conversion + table, but only if the CLINTCP parameter in the DFHCNV macro is set to SYSDEF. + required: false + type: str + clsdstp: + description: + - The CLSDSTP system initialization parameter specifies the notification required for an EXEC CICS ISSUE + PASS command. + required: false + type: str + choices: + - NOTIFY + - NONOTIFY + clt: + description: + - The CLT parameter specifies the suffix for the command list table (CLT), if this SIT is used by an + alternate XRF system. + required: false + type: str + cmdprot: + description: + - The CMDPROT parameter specifies whether to allow or inhibit CICS validation of start addresses of storage + referenced as output parameters on EXEC CICS commands. + required: false + type: str + choices: + - "YES" + - "NO" + cmdsec: + description: + - The CMDSEC parameter specifies whether or not you want CICS to honor the CMDSEC option specified on a + transaction's resource definition. + required: false + type: str + choices: + - ASIS + - ALWAYS + confdata: + description: + - The CONFDATA parameter specifies whether CICS is to redact sensitive data that might otherwise appear in + CICS trace entries or in dumps. + required: false + type: str + choices: + - HIDE + - SHOW + conftxt: + description: + - The CONFTXT system initialization parameter specifies whether CICS is to prevent z/OS Communications + Server from tracing user data. + required: false + type: str + choices: + - "NO" + - "YES" + cpsmconn: + description: + - The CPSMCONN parameter specifies whether you want CICS to invoke the specified component during + initialization of the region. 
+ required: false + type: str + choices: + - "NO" + - CMAS + - LMAS + - SMSSJ + - WUI + crlprofile: + description: + - The CRLPROFILE parameter specifies the name of the profile that is used to authorize CICS to access the + certification revocation lists (CRLs) that are stored in an LDAP server. + required: false + type: str + csdacc: + description: + - The CSDACC parameter specifies the type of access to the CSD to be permitted to this CICS region. + required: false + type: str + choices: + - READWRITE + - READONLY + csdbkup: + description: + - The CSDBKUP parameter specifies whether or not the CSD is eligible for BWO. + required: false + type: str + choices: + - STATIC + - DYNAMIC + csdbufnd: + description: + - The CSDBUFND parameter specifies the number of buffers to be used for CSD data. + required: false + type: int + csdbufni: + description: + - The CSDBUFNI parameter specifies the number of buffers to be used for the CSD index. + required: false + type: int + csddisp: + description: + - The CSDDISP parameter specifies the disposition of the data set to be allocated to the CSD. + required: false + type: str + choices: + - OLD + - SHR + csddsn: + description: + - The CSDDSN parameter specifies the 1-44 character JCL data set name (DSNAME) to be used for the CSD. + required: false + type: str + csdfrlog: + description: + - The CSDFRLOG parameter specifies a number that corresponds to the journal name that CICS uses to identify + the forward recovery log stream for the CSD. + required: false + type: int + csdinteg: + description: + - The CSDINTEG parameter specifies the level of read integrity for the CSD if it is accessed in RLS mode. + required: false + type: str + choices: + - UNCOMMITTED + - CONSISTENT + - REPEATABLE + csdjid: + description: + - The CSDJID parameter specifies the journal identifier of the journal that you want CICS to use for + automatic journaling of file requests against the CSD. 
+ required: false + type: str + csdlsrno: + description: + - The CSDLSRNO system initialization parameter specifies whether the CSD is to be associated with a local + shared resource (LSR) pool. + required: false + type: str + csdrecov: + description: + - The CSDRECOV system initialization parameter specifies whether the CSD is a recoverable file. + required: false + type: str + choices: + - NONE + - ALL + - BACKOUTONLY + csdrls: + description: + - The CSDRLS system initialization parameter specifies whether CICS is to access the CSD in RLS mode. + required: false + type: str + choices: + - "NO" + - "YES" + csdstrno: + description: + - The CSDSTRNO system initialization parameter specifies the number of concurrent requests that can be + processed against the CSD. + required: false + type: int + cwakey: + description: + - The CWAKEY system initialization parameter specifies the storage key for the common work area (CWA) if + you are operating CICS with storage protection (STGPROT=YES). + required: false + type: str + choices: + - USER + - CICS + dae: + description: + - The DAE system initialization parameter specifies the default DAE action when new system dump table + entries are created. + required: false + type: str + choices: + - "NO" + - "YES" + datform: + description: + - The DATFORM system initialization parameter specifies the external date display standard that you want to + use for CICS date displays. + required: false + type: str + choices: + - MMDDYY + - DDMMYY + - YYMMDD + db2conn: + description: + - The DB2CONN system initialization parameter specifies whether you want CICS to start the connection + automatically during initialization. + required: false + type: str + choices: + - "NO" + - "YES" + dbctlcon: + description: + - The DBCTLCON system initialization parameter specifies whether you want CICS to start the DBCTL connection + automatically during initialization. 
+ required: false + type: str + choices: + - "NO" + - "YES" + debugtool: + description: + - The DEBUGTOOL system initialization parameter specifies whether you want to use debugging profiles to + select the programs that will run under the control of a debugging tool. + required: false + type: str + choices: + - "NO" + - "YES" + dfltuser: + description: + - The DFLTUSER system initialization parameter specifies the RACF userid of the default user; that is, the + user whose security attributes are used to protect CICS resources in the absence of other, more specific, + user identification. + required: false + type: str + dip: + description: + - The DIP system initialization parameter specifies whether the batch data interchange program, DFHDIP, is + to be included. + required: false + type: str + choices: + - "NO" + - "YES" + dismacp: + description: + - The DISMACP system initialization parameter specifies whether CICS is to disable any transaction that + terminates abnormally with an ASRD or ASRE abend. + required: false + type: str + choices: + - "NO" + - "YES" + doccodepage: + description: + - The DOCCODEPAGE system initialization parameter specifies the default host code page to be used by the + document domain. + required: false + type: str + dsalim: + description: + - The DSALIM system initialization parameter specifies the upper limit of the total amount of storage within + which CICS® can allocate the individual dynamic storage areas (DSAs) that reside in 24-bit storage. + required: false + type: str + dshipidl: + description: + - The DSHIPIDL system initialization parameter specifies the minimum time, in hours, minutes, and seconds, + that an inactive shipped terminal definition must remain installed in this region. + required: false + type: int + dshipint: + description: + - The DSHIPINT system initialization parameter specifies the interval between invocations of the timeout + delete mechanism. 
+ required: false + type: int + dsrtpgm: + description: + - The DSRTPGM system initialization parameter specifies the name of a distributed routing program. The + distributed routing program must be specified in the DSRTPGM parameter for all routing and potential + target regions. + required: false + type: str + dtrpgm: + description: + - The DTRPGM system initialization parameter specifies the name of a dynamic routing program. + required: false + type: str + dtrtran: + description: + - The DTRTRAN system initialization parameter specifies the name of the transaction definition that you want + CICS to use for dynamic transaction routing. + required: false + type: str + dump: + description: + - The DUMP system initialization parameter specifies whether the CICS dump domain is to take SDUMPs. + required: false + type: str + choices: + - "YES" + - "NO" + - TABLEONLY + dumpds: + description: + - The DUMPDS system initialization parameter specifies the transaction dump data set that is to be opened + during CICS initialization. + required: false + type: str + choices: + - AUTO + - A + - B + dumpsw: + description: + - The DUMPSW system initialization parameter specifies whether you want CICS to switch automatically to the + next dump data set when the first is full. + required: false + type: str + choices: + - "NO" + - NEXT + - ALL + duretry: + description: + - The DURETRY system initialization parameter specifies, in seconds, the total time that CICS is to continue + trying to obtain a system dump using the SDUMP macro. + required: false + type: int + ecdsasze: + description: + - The ECDSASZE system initialization parameter specifies the size of the ECDSA. 
+ required: false + type: str + edsalim: + description: + - The EDSALIM system initialization parameter specifies the upper limit of the total amount of storage + within which CICS® can allocate the individual extended dynamic storage areas (ExxDSAs) that reside in + 31-bit (above-the-line) storage; that is, above 16 MB but below 2 GB. + required: false + type: str + eodi: + description: + - The EODI system initialization parameter specifies the end-of-data indicator for input from sequential + devices. + required: false + type: str + erdsasze: + description: + - The ERDSASZE system initialization parameter specifies the size of the ERDSA. + required: false + type: str + esdsasze: + description: + - The ESDSASZE system initialization parameter specifies the size of the ESDSA. + required: false + type: str + esmexits: + description: + - The ESMEXITS system initialization parameter specifies whether installation data is to be passed through + the RACROUTE interface to the external security manager (ESM) for use in exits written for the ESM. + required: false + type: str + choices: + - NOINSTLN + - INSTLN + eudsasze: + description: + - The EUDSASZE system initialization parameter specifies the size of the EUDSA. + required: false + type: str + fcqronly: + description: + - The FCQRONLY system initialization parameter specifies whether you want CICS to force all file control + requests to run under the CICS QR TCB. This parameter applies to file control requests that access VSAM + RLS files and local VSAM LSR files. + required: false + type: str + choices: + - "NO" + - "YES" + fct: + description: + - The FCT system initialization parameter specifies the suffix of the file control table to be used. + required: false + type: str + fepi: + description: + - The FEPI system initialization parameter specifies whether or not you want to use the Front End + Programming Interface feature (FEPI). 
+ required: false + type: str + choices: + - "NO" + - "YES" + fldsep: + description: + - The FLDSEP system initialization parameter specifies one through four field-separator characters, each + of which indicates end of field in the terminal input data. + required: false + type: str + fldstrt: + description: + - The FLDSTRT system initialization parameter specifies a single character to be the field-name-start + character for free-form input for built-in functions. + required: false + type: str + forceqr: + description: + - The FORCEQR system initialization parameter specifies whether you want CICS to force all CICS API user + application programs that are specified as threadsafe to run under the CICS QR TCB, as if they were + specified as quasi-reentrant programs. + required: false + type: str + choices: + - "NO" + - "YES" + fsstaff: + description: + - The FSSTAFF system initialization parameter prevents transactions initiated by function-shipped EXEC CICS + START requests being started against incorrect terminals. + required: false + type: str + choices: + - "YES" + - "NO" + ftimeout: + description: + - The FTIMEOUT system initialization parameter specifies a timeout interval for requests made on files that + are opened in RLS mode. + required: false + type: int + gmtext: + description: + - The GMTEXT system initialization parameter specifies whether the default logon message text (WELCOME TO + CICS) or your own message text is to be displayed on the screen. + required: false + type: str + gmtran: + description: + - The GMTRAN system initialization parameter specifies the ID of a transaction. + required: false + type: str + gntran: + description: + - The GNTRAN system initialization parameter specifies the transaction that you want CICS to invoke when a + user's terminal-timeout period expires, and instructs CICS whether to keep a pseudo-conversation in use at + a terminal that is the subject of a timeout sign-off. 
+ required: false + type: str + grname: + description: + - The GRNAME system initialization parameter specifies the z/OS Communications Server generic resource name, + as 1 through 8 characters, under which a group of CICS terminal-owning regions in a CICSplex register to + z/OS Communications Server. + required: false + type: str + grplist: + description: + - The GRPLIST system initialization parameter specifies the names of up to four lists of resource definition + groups on the CICS system definition file (CSD). The resource definitions in all the groups in the + specified lists are loaded during initialization when CICS performs a cold start. If a warm or emergency + start is performed, the resource definitions are derived from the global catalog, and the GRPLIST + parameter is ignored. + required: false + type: str + gtftr: + description: + - The GTFTR system initialization parameter specifies whether CICS can use the MVS generalized trace + facility (GTF) as a destination for trace data. + required: false + type: str + choices: + - "OFF" + - "ON" + hpo: + description: + - The HPO system initialization parameter specifies whether you want to use the z/OS Communications Server + authorized path feature of the high performance option (HPO). + required: false + type: str + choices: + - "NO" + - "YES" + httpserverhdr: + description: + - The HTTPSERVERHDR system initialization parameter specifies the value (up to 64 characters) that CICS sets + in the server header of HTTP responses. + required: false + type: str + httpusragenthdr: + description: + - The HTTPUSRAGENTHDR system initialization parameter specifies the value (up to 64 characters) that CICS + sets in the user-agent header of HTTP requests. + required: false + type: str + icp: + description: + - The ICP system initialization parameter specifies that you want to perform a cold start for interval + control program. 
+ required: false + type: str + choices: + - COLD + icv: + description: + - The ICV system initialization parameter specifies the region exit time interval in milliseconds. + required: false + type: int + icvr: + description: + - The ICVR system initialization parameter specifies the default runaway task time interval in milliseconds + as a decimal number. + required: false + type: int + icvtsd: + description: + - The ICVTSD system initialization parameter specifies the terminal scan delay value. + required: false + type: int + infocenter: + description: + - The INFOCENTER system initialization parameter specifies the location of the online . If you add this + parameter to the Web User Interface (WUI) CICS startup JCL, a link labeled Information Center is displayed + on WUI views and menus. If you do not code this parameter, CICS does not construct links to IBM + Documentation. + required: false + type: str + initparm: + description: + - The INITPARM system initialization parameter specifies parameters that are to be passed to application + programs that use the ASSIGN INITPARM command. + required: false + type: str + intrdrjobuser: + description: + - The INTRDRJOBUSER system initialization parameter instructs whether to use the task user ID or the + CICS® region user ID as the job user ID for a JOB card that is submitted, without a USER parameter, + by using SPOOLOPEN with USERID("INTRDR") and SPOOLWRITE. The default is the task user ID unless set + otherwise by INTRDRJOBUSER. + required: false + type: str + choices: + - "TASK" + - "REGION" + inttr: + description: + - The INTTR system initialization parameter specifies whether the internal CICS trace destination is to be + activated at system initialization. + required: false + type: str + choices: + - "ON" + - "OFF" + ircstrt: + description: + - The IRCSTRT system initialization parameter specifies whether IRC is to be started up at system + initialization. 
+ required: false + type: str + choices: + - "NO" + - "YES" + isc: + description: + - The ISC system initialization parameter specifies whether the CICS programs required for multiregion + operation (MRO) and are to be included. + required: false + type: str + choices: + - "NO" + - "YES" + jesdi: + description: + - The JESDI system initialization parameter specifies, in a SIT for an alternate XRF system, the JES delay + interval. + required: false + type: int + jvmprofiledir: + description: + - The JVMPROFILEDIR system initialization parameter specifies the name (up to 240 characters long) of a z/OS + UNIX directory that contains the JVM profiles + for CICS. CICS searches this directory for the profiles it needs to configure JVMs. + required: false + type: str + kerberosuser: + description: + - The KERBEROSUSER system initialization parameter specifies the user ID that is associated with the + Kerberos service principal for the CICS region. + required: false + type: str + keyring: + description: + - The KEYRING system initialization parameter specifies the fully qualified name of the key ring, within the + RACF database, that contains the keys and X.509 certificates used by CICS support for the Secure Sockets + Layer (SSL) and for web services security. The region user ID that will use the key ring must either own + the key ring or have the authority to use the key ring if it is owned by a different region user ID. You + can create an initial key ring with the DFH$RING exec in .CICS.SDFHSAMP. + required: false + type: str + lgdfint: + description: + - The LGDFINT system initialization parameter specifies the log defer interval to be used by CICS® log + manager when determining how long to delay a forced journal write request before invoking the MVS™ system + logger. 
+ required: false + type: int + lgnmsg: + description: + - The LGNMSG system initialization parameter specifies whether z/OS Communications Server logon data is + to be made available to an application program. + required: false + type: str + choices: + - "NO" + - "YES" + llacopy: + description: + - The LLACOPY system initialization parameter specifies the situations where CICS uses either the LLACOPY + macro or the BLDL macro when locating modules in the DFHRPL or dynamic LIBRARY concatenation. + required: false + type: str + choices: + - "YES" + - "NO" + - NEWCOPY + localccsid: + description: + - The LOCALCCSID system initialization parameter specifies the default CCSID for the local region. + required: false + type: int + lpa: + description: + - The LPA system initialization parameter specifies whether CICS and user modules can be used from the link + pack areas. + required: false + type: str + choices: + - "NO" + - "YES" + maxopentcbs: + description: + - The MAXOPENTCBS system initialization parameter specifies the maximum number, in the range 32 through + 4032, of open task control blocks (open TCBs) CICS® can create in the pool of L8 and L9 mode TCBs. + required: false + type: int + maxsockets: + description: + - The MAXSOCKETS system initialization parameter specifies the maximum number of IP sockets that can be + managed by the CICS sockets domain. + required: false + type: int + maxssltcbs: + description: + - The MAXSSLTCBS system initialization parameter specifies the maximum number of S8 TCBs that can run in the + SSL pool. + required: false + type: int + maxxptcbs: + description: + - The MAXXPTCBS system initialization parameter specifies the maximum number, in the range 1 through 2000, + of open X8 and X9 TCBs that can exist concurrently in the CICS region. + required: false + type: int + mct: + description: + - The MCT system initialization parameter specifies the monitoring control table suffix. 
+ required: false + type: str + mintlslevel: + description: + - The MINTLSLEVEL system initialization parameter specifies the minimum TLS protocol that CICS uses for + secure TCP/IP connections. + required: false + type: str + choices: + - TLS11 + - TLS12 + - TLS13 + mn: + description: + - The MN system initialization parameter specifies whether monitoring is to be switched 'ON' or 'OFF' at + initialization. + required: false + type: str + choices: + - "OFF" + - "ON" + mnconv: + description: + - The MNCONV system initialization parameter specifies whether conversational tasks have separate + performance class records produced for each pair of terminal control I/O requests. + required: false + type: str + choices: + - "NO" + - "YES" + mnexc: + description: + - The MNEXC system initialization parameter specifies whether the monitoring exception class is to be made + active during initialization. + required: false + type: str + choices: + - "OFF" + - "ON" + mnfreq: + description: + - The MNFREQ system initialization parameter specifies the interval for which CICS automatically produces a + transaction performance class record for any long-running transaction. + required: false + type: int + mnidn: + description: + - The MNIDN system initialization parameter specifies whether the monitoring identity class is to be made + active during CICS initialization. + required: false + type: str + choices: + - "OFF" + - "ON" + mnper: + description: + - The MNPER system initialization parameter specifies whether the monitoring performance class is to be made + active during CICS initialization. + required: false + type: str + choices: + - "OFF" + - "ON" + mnres: + description: + - The MNRES system initialization parameter specifies whether transaction resource monitoring is to be made + active during CICS initialization. 
+ required: false + type: str + choices: + - "OFF" + - "ON" + mnsync: + description: + - The MNSYNC system initialization parameter specifies whether you want CICS to produce a transaction + performance class record when a transaction takes an implicit or explicit syncpoint (unit-of-work). + required: false + type: str + choices: + - "NO" + - "YES" + mntime: + description: + - The MNTIME system initialization parameter specifies whether you want the time stamp fields in the + performance class monitoring data to be returned to an application using the EXEC CICS COLLECT STATISTICS + MONITOR(taskno) command in either GMT or local time. + required: false + type: str + choices: + - GMT + - LOCAL + mqconn: + description: + - The MQCONN system initialization parameter specifies whether you want CICS to start a connection to + automatically during initialization. + required: false + type: str + choices: + - "NO" + - "YES" + mrobtch: + description: + - The MROBTCH system initialization parameter specifies the number of events that must occur before CICS is + posted for dispatch because of the batching mechanism. + required: false + type: int + mrofse: + description: + - The MROFSE system initialization parameter specifies whether you want to extend the lifetime of the + long-running mirror to keep it allocated until the end of the task rather than after a user syncpoint for + function shipping applications. + required: false + type: str + choices: + - "NO" + - "YES" + mrolrm: + description: + - The MROLRM system initialization parameter specifies whether you want to establish an MRO long-running + mirror task. + required: false + type: str + choices: + - "NO" + - "YES" + msgcase: + description: + - The MSGCASE system initialization parameter specifies how you want the message domains to display mixed + case messages. 
+ required: false + type: str + choices: + - MIXED + - UPPER + msglvl: + description: + - The MSGLVL system initialization parameter specifies the message level that controls the generation of + messages to the console and JES message log. + required: false + type: int + choices: + - "1" + - "0" + mxt: + description: + - The MXT system initialization parameter specifies the maximum number, in the range 10 through 2000, of + user tasks that can exist in a CICS system at the same time. The MXT value does not include CICS system + tasks. + required: false + type: int + natlang: + description: + - The NATLANG system initialization parameter specifies the single-character code for the language to be + supported in this CICS run. + required: false + type: str + choices: + - E + - C + - K + ncpldft: + description: + - The NCPLDFT system initialization parameter specifies the name of the default named counter pool to be + used by the CICS region 'ON' calls it makes to a named counter server. + required: false + type: str + newsit: + description: + - The NEWSIT system initialization parameter specifies whether CICS is to load the specified SIT, and + enforce the use of all system initialization parameters, modified by any system initialization parameters + provided by PARM, SYSIN, or the system console, even in a warm start. + required: false + type: str + choices: + - "NO" + - "YES" + nistsp800131a: + description: + - The NISTSP800131A system initialization parameter specifies whether the CICS region is to check for + conformance to the NIST SP800-131A standard. + required: false + type: str + choices: + - NOCHECK + - CHECK + nonrlsrecov: + description: + - The NONRLSRECOV system initialization parameter specifies whether VSAM catalog recovery options should + override those specified on the CICS FILE resource definition for all non-RLS files. 
Default behavior, + with NONRLSRECOV=VSAMCAT, will take recovery attributes from the catalog if they are present, and + from the file definition otherwise. RLS files must always specify recovery options on the catalog. + required: false + type: str + choices: + - VSAMCAT + - FILEDEF + nqrnl: + description: + - The NQRNL system initialization parameter controls resource name list (RNL) processing by z/OS global + resource serialization, which can cause the scope value of a resource to change. CICS uses z/OS global + resource serialization to provide sysplex-wide protection of application resources. + required: false + type: str + choices: + - "NO" + - "YES" + offsite: + description: + - The OFFSITE system initialization parameter specifies whether CICS is to restart in off-site recovery + mode; that is, a restart is taking place at a remote site. + required: false + type: str + choices: + - "NO" + - "YES" + opertim: + description: + - The OPERTIM system initialization parameter specifies the write-to-operator timeout value, in the range 0 + through 86400 seconds (24 hours). + required: false + type: int + opndlim: + description: + - The OPNDLIM system initialization parameter specifies the destination and close destination request limit. + required: false + type: int + parmerr: + description: + - The PARMERR system initialization parameter specifies what action you want to follow if CICS detects + incorrect system initialization parameter overrides during initialization. + required: false + type: str + choices: + - INTERACT + - IGNORE + - ABEND + pdi: + description: + - The PDI system initialization parameter specifies the XRF primary delay interval, in seconds, in a SIT for + an active CICS region. + required: false + type: int + pdir: + description: + - The PDIR system initialization parameter specifies a suffix for the PDIR list. 
+ required: false + type: str + pgaictlg: + description: + - The PGAICTLG system initialization parameter specifies whether autoinstalled program definitions should be + cataloged. + required: false + type: str + choices: + - MODIFY + - NONE + - ALL + pgaiexit: + description: + - The PGAIEXIT system initialization parameter specifies the name of the program autoinstall exit program. + required: false + type: str + pgaipgm: + description: + - The PGAIPGM system initialization parameter specifies the state of the program autoinstall function at + initialization. + required: false + type: str + choices: + - INACTIVE + - ACTIVE + pgchain: + description: + - The PGCHAIN system initialization parameter specifies the character string that is identified by terminal + control as a BMS terminal page-chaining command. + required: false + type: str + pgcopy: + description: + - The PGCOPY system initialization parameter specifies the character string that is identified by terminal + control as a BMS command to copy output from one terminal to another. + required: false + type: str + pgpurge: + description: + - The PGPURGE system initialization parameter specifies the character string that is identified by terminal + control as a BMS terminal page-purge command. + required: false + type: str + pgret: + description: + - The PGRET system initialization parameter specifies the character string that is recognized by terminal + control as a BMS terminal page-retrieval command. + required: false + type: str + pltpi: + description: + - The PLTPI system initialization parameter specifies the suffix for, or the full name of, a program list + table that contains a list of programs to be run in the final stages of system initialization. + required: false + type: str + pltpisec: + description: + - The PLTPISEC system initialization parameter specifies whether you want CICS to perform command security + or resource security checking for PLT programs during CICS initialization. 
+ required: false + type: str + choices: + - NONE + - CMDSEC + - RESSEC + - ALL + pltpiusr: + description: + - The PLTPIUSR system initialization parameter specifies the user ID that CICS uses for security checking + for PLT programs that run during CICS initialization. + required: false + type: str + pltsd: + description: + - The PLTSD system initialization parameter specifies the suffix for, or full name of, a program list table + that contains a list of programs to be run during system termination. + required: false + type: str + prgdlay: + description: + - The PRGDLAY system initialization parameter specifies the BMS purge delay time interval that is added to + the specified delivery time to determine when a message is to be considered undeliverable and therefore + purged. + required: false + type: int + print: + description: + - The PRINT system initialization parameter specifies the method of requesting printout of the contents of + a 3270 screen. + required: false + type: str + choices: + - "NO" + - "YES" + - PA1 + - PA2 + - PA3 + prtyage: + description: + - The PRTYAGE system initialization parameter specifies the number of milliseconds to be used in the + priority aging algorithm that is used to increment the priority of a task. + required: false + type: int + prvmod: + description: + - The PRVMOD system initialization parameter specifies the names of those modules that are not to be used + from the LPA. + required: false + type: str + psbchk: + description: + - The PSBCHK system initialization parameter specifies whether CICS is to perform PSB authorization checks + for remote terminal users who use transaction routing to initiate a transaction in this CICS region to + access an attached IMS system. 
+ required: false + type: str + choices: + - "NO" + - "YES" + psdint: + description: + - The PSDINT system initialization parameter specifies the persistent session delay interval, which states + if, and for how long, z/OS Communications Server holds sessions in a recovery-pending state. + required: false + type: int + pstype: + description: + - The PSTYPE system initialization parameter specifies whether CICS uses z/OS Communications Server + single-node persistent sessions (SNPS), multinode persistent sessions (MNPS), or does not use z/OS + Communications Server persistent sessions support (NOPS). + required: false + type: str + choices: + - SNPS + - MNPS + - NOPS + pvdelay: + description: + - The PVDELAY system initialization parameter specifies the persistent verification delay as a value in the + range 0 through 10080 minutes (up to 7 days). + required: false + type: int + quiestim: + description: + - The QUIESTIM system initialization parameter specifies a timeout value for data set quiesce requests. + required: false + type: int + racfsync: + description: + - The RACFSYNC system initialization parameter specifies whether CICS listens for type 71 ENF events and + refreshes user security. + required: false + type: str + choices: + - "YES" + - "NO" + - "CPSM" + ramax: + description: + - The RAMAX system initialization parameter specifies the size in bytes of the I/O area allocated for each + RECEIVE ANY issued by CICS, in the range 0 through 32767 bytes. + required: false + type: int + rapool: + description: + - The RAPOOL system initialization parameter specifies the number of concurrent receive-any requests that + CICS is to process from the z/OS Communications Server for SNA. + required: false + type: str + rdsasze: + description: + - The RDSASZE system initialization parameter specifies the size of the RDSA. 
+ required: false + type: str + rentpgm: + description: + - The RENTPGM system initialization parameter specifies whether you want CICS to allocate the read-only DSAs + from read-only key-0 protected storage. + required: false + type: str + choices: + - PROTECT + - NOPROTECT + resoverrides: + description: + - The RESOVERRIDES system initialization parameter specifies the 1-64 character name of the resource + overrides file. For more information, see . + required: false + type: str + resp: + description: + - The RESP system initialization parameter specifies the type of request that CICS terminal control receives + from logical units. + required: false + type: str + choices: + - FME + - RRN + ressec: + description: + - The RESSEC system initialization parameter specifies whether you want CICS to honor the RESSEC option + specified on a transaction's resource definition. + required: false + type: str + choices: + - ASIS + - ALWAYS + rls: + description: + - The RLS system initialization parameter specifies whether CICS is to support VSAM record-level sharing + (RLS). + required: false + type: str + choices: + - "NO" + - "YES" + rlstolsr: + description: + - The RLSTOLSR system initialization parameter specifies whether CICS is to include files that are to be + opened in RLS mode when calculating the number + of buffers, strings, and other resources for an LSR pool. + required: false + type: str + choices: + - "NO" + - "YES" + rmtran: + description: + - The RMTRAN system initialization parameter specifies the name of the transaction that you want an + alternate CICS to initiate when logged-on class 1 terminals, which are defined with the attribute + RECOVNOTIFY(TRANSACTION) specified, are switched following a takeover. + required: false + type: str + rrms: + description: + - The RRMS system initialization parameter specifies whether CICS is to register as a resource manager with + recoverable resource management services (RRMS). 
+ required: false + type: str + choices: + - "NO" + - "YES" + rst: + description: + - The RST system initialization parameter specifies a recoverable service table suffix. + required: false + type: str + rstsignoff: + description: + - The RSTSIGNOFF system initialization parameter specifies whether all users signed-on to the active CICS + region are to remain signed-on following a persistent sessions restart or an XRF takeover. + required: false + type: str + choices: + - NOFORCE + - FORCE + rstsigntime: + description: + - The RSTSIGNTIME parameter specifies the timeout delay interval for signon retention during a persistent + sessions restart or an XRF takeover. + required: false + type: int + ruwapool: + description: + - The RUWAPOOL parameter specifies the option for allocating a storage pool the first time a program invoked + by Language Environment runs in a task. + required: false + type: str + choices: + - "NO" + - "YES" + sdsasze: + description: + - The SDSASZE system initialization parameter specifies the size of the SDSA. + required: false + type: str + sdtran: + description: + - The SDTRAN system initialization parameter specifies the name of the shutdown transaction to be started at + the beginning of normal and immediate shutdown. + required: false + type: str + sec: + description: + - The SEC system initialization parameter specifies what level of external security you want CICS to use. + required: false + type: str + choices: + - "YES" + - "NO" + secprfx: + description: + - The SECPRFX system initialization parameter specifies whether CICS prefixes the resource names in any + authorization requests to RACF. + required: false + type: str + sit: + description: + - The SIT system initialization parameter specifies the suffix, if any, of the system initialization table + that you want CICS to load at the start of initialization. 
+ required: false + type: str + skrxxxx: + description: + - The SKRxxxx system initialization parameter specifies that a single-keystroke-retrieval operation is + required. + - 'Provide a dictionary with the key specifying a key on the 3270 keyboard and the value identifying a page + retrieval command that the 3270 key represents. For example, PF20: PGPURGE' + - The valid keys you can specify are PA1 through PA3, and PF1 through PF24. + required: false + type: dict + snpreset: + description: + - The SNPRESET system initialization parameter specifies whether preset userid terminals share a single + access control environment element (ACEE) that is associated with the userid, or a unique ACEE for every + terminal. + required: false + type: str + choices: + - UNIQUE + - SHARED + snscope: + description: + - The SNSCOPE system initialization parameter specifies whether a userid can be signed on to CICS more than + once, within the scope of a single CICS region, a single MVS image, and a sysplex. + required: false + type: str + choices: + - NONE + - CICS + - MVSIMAGE + - SYSPLEX + sotuning: + description: + - The SOTUNING system initialization parameter specifies whether performance tuning for HTTP connections + will occur to protect CICS from unconstrained resource demand. + required: false + type: str + choices: + - "YES" + - "520" + spctr: + description: + - The SPCTR system initialization parameter specifies the level of special tracing required for CICS as a + whole. + required: false + type: str + spctrxx: + description: + - The SPCTRxx system initialization parameter specifies the level of special tracing activated for a particular CICS + component. When you enable special tracing for a transaction, a terminal, or both, the trace points of this component + at the specified trace level are eligible to make trace calls at any given point in the process of a special tracing task. 
+ - 'Provide a dictionary with the key specifying a two-letter code that represents a component and the value specifying the + trace level. For example: AP=1-2' + - You can provide several dictionaries to specify the level of special tracing for several components. Each component + is defined by one dictionary. + - For information about CICS components and their respective two-letter code, see + L(Component names and abbreviations,https://www.ibm.com/docs/en/cics-ts/6.1?topic=component-names-abbreviations). + required: false + type: dict + spool: + description: + - The SPOOL system initialization parameter specifies whether the system spooling interface is required. + required: false + type: str + choices: + - "NO" + - "YES" + srbsvc: + description: + - The SRBSVC system initialization parameter specifies the number that you have assigned to the CICS type 6 + SVC. + required: false + type: int + srt: + description: + - The SRT system initialization parameter specifies the system recovery table suffix. + required: false + type: str + srvercp: + description: + - The SRVERCP system initialization parameter specifies the default server code page to be used by the + DFHCNV data conversion table but only if the SRVERCP parameter in the DFHCNV macro is set to SYSDEF. + required: false + type: str + sslcache: + description: + - The SSLCACHE system initialization parameter specifies whether session IDs for SSL sessions are to be + cached locally or at sysplex level for reuse by the CICS® region. The SSL cache allows CICS to perform + abbreviated handshakes with clients that it has previously authenticated. + required: false + type: str + choices: + - CICS + - SYSPLEX + ssldelay: + description: + - The SSLDELAY system initialization parameter specifies the length of time in seconds for which CICS + retains session ids for secure socket connections. 
+ required: false + type: int + start: + description: + - The START system initialization parameter specifies the type of start for the system initialization + program. + required: false + type: str + choices: + - AUTO + - INITIAL + - COLD + - STANDBY + - (INITIAL, ALL) + - (AUTO, ALL) + - (COLD, ALL) + - (STANDBY, ALL) + starter: + description: + - The STARTER system initialization parameter specifies whether the generation of starter system modules + (with $ and # suffixes) is permitted, and various MNOTES are suppressed. + required: false + type: str + choices: + - "YES" + - "NO" + stateod: + description: + - The STATEOD system initialization parameter specifies the end-of-day time in the format hhmmss. + required: false + type: int + statint: + description: + - The STATINT system initialization parameter specifies the recording interval for system statistics in the + format hhmmss. + required: false + type: int + statrcd: + description: + - The STATRCD system initialization parameter specifies the interval statistics recording status at CICS + initialization. + required: false + type: str + choices: + - "OFF" + - "ON" + stgprot: + description: + - The STGPROT system initialization parameter specifies whether you want storage protection to operate in + the CICS region. + required: false + type: str + choices: + - "YES" + - "NO" + stgrcvy: + description: + - The STGRCVY system initialization parameter specifies whether CICS should try to recover from a storage + violation. + required: false + type: str + choices: + - "NO" + - "YES" + stntr: + description: + - The STNTR system initialization parameter specifies the level of standard tracing required for CICS as a + whole. + required: false + type: str + stntrxx: + description: + - The STNTRxx system initialization parameter specifies the level of standard tracing for a particular CICS component. 
+ - 'Provide a dictionary with the key specifying a two-letter code that represents a component and the value specifying the + trace level. For example: AP=1-2' + - You can provide several dictionaries to specify the level of standard tracing for several components. Each component + is defined by one dictionary. For components that are not defined here, their standard tracing levels are determined + by STNTR. + - For information about CICS components and their respective two-letter code, see + L(Component names and abbreviations,https://www.ibm.com/docs/en/cics-ts/6.1?topic=component-names-abbreviations). + required: false + type: dict + subtsks: + description: + - The SUBTSKS system initialization parameter specifies the number of task control blocks (TCBs) you want + CICS to use for running tasks in concurrent mode. + required: false + type: int + choices: + - "0" + - "1" + suffix: + description: + - The SUFFIX system initialization parameter specifies the last two characters of the name of this system + initialization table. + required: false + type: str + sysidnt: + description: + - The SYSIDNT system initialization parameter specifies a 1- to 4-character name that is known only to your + CICS region. + required: false + type: str + systr: + description: + - The SYSTR system initialization parameter specifies the setting of the main system trace flag. + required: false + type: str + choices: + - "ON" + - "OFF" + sydumax: + description: + - The SYDUMAX system initialization parameter specifies the limit on the number of system dumps that can be + taken per dump table entry. + required: false + type: int + takeovr: + description: + - The TAKEOVR system initialization parameter specifies the action to be taken by the alternate CICS region, + following the apparent loss of the surveillance signal in the active CICS region. 
+ required: false + type: str + choices: + - MANUAL + - AUTO + - COMMAND + tbexits: + description: + - The TBEXITS system initialization parameter specifies the names of your backout exit programs for use + during emergency restart backout processing. + required: false + type: str + tcp: + description: + - The TCP system initialization parameter specifies whether the pregenerated non-z/OS Communications Server + terminal control program, DFHTCP, is to be included. + required: false + type: str + choices: + - "NO" + - "YES" + tcpip: + description: + - The TCPIP system initialization parameter specifies whether CICS TCP/IP services are to be activated at + CICS startup. + required: false + type: str + choices: + - "YES" + - "NO" + tcsactn: + description: + - The TCSACTN system initialization parameter specifies the required action that CICS terminal control + should take if the terminal control shutdown wait threshold expires. + required: false + type: str + choices: + - NONE + - UNBIND + - FORCE + tcswait: + description: + - The TCSWAIT system initialization parameter specifies the required CICS terminal control shutdown wait + threshold. + required: false + type: str + tct: + description: + - The TCT system initialization parameter specifies which terminal control table, if any, is to be loaded. + required: false + type: str + tctuakey: + description: + - The TCTUAKEY system initialization parameter specifies the storage key for the terminal control table user + areas (TCTUAs) if you are operating CICS with storage protection (STGPROT=YES). + required: false + type: str + choices: + - USER + - CICS + tctualoc: + description: + - The TCTUALOC system initialization parameter specifies where terminal user areas (TCTUAs) are to be + stored. + required: false + type: str + choices: + - BELOW + - ANY + td: + description: + - The TD system initialization parameter specifies the number of VSAM buffers and strings to be used for + intrapartition transient data (TD). 
+ required: false + type: str + tdintra: + description: + - The TDINTRA system initialization parameter specifies whether CICS is to initialize with empty + intrapartition TD queues. + required: false + type: str + choices: + - NOEMPTY + - EMPTY + traniso: + description: + - The TRANISO system initialization parameter specifies, together with the STGPROT system initialization + parameter, whether you want transaction isolation in the CICS region. + required: false + type: str + choices: + - "NO" + - "YES" + trap: + description: + - The TRAP system initialization parameter specifies whether the FE global trap exit is to be activated at + system initialization. + required: false + type: str + choices: + - "OFF" + - "ON" + trdumax: + description: + - The TRDUMAX system initialization parameter specifies the limit on the number of transaction dumps that + may be taken per Dump Table entry. + required: false + type: int + trtabsz: + description: + - The TRTABSZ system initialization parameter specifies the size, in kilobytes, of the internal trace table. + required: false + type: int + trtransz: + description: + - The TRTRANSZ system initialization parameter specifies the size, in kilobytes, of the transaction dump + trace table. + required: false + type: int + trtranty: + description: + - The TRTRANTY system initialization parameter specifies which trace entries should be copied from the + internal trace table to the transaction dump trace table. + required: false + type: str + choices: + - TRAN + - ALL + ts: + description: + - The TS system initialization parameter specifies whether you want to perform a cold start for temporary + storage, as well as the number of VSAM buffers and strings to be used for auxiliary temporary storage. + required: false + type: str + tsmainlimit: + description: + - The TSMAINLIMIT system initialization parameter specifies a limit for the storage that is available for + main temporary storage queues to use. 
You can specify an amount of storage in the range 1 - 32768 MB + (32 GB), but this amount must not be greater than 25% of the value of the z/OS parameter MEMLIMIT. + The default is 64 MB. + required: false + type: str + tst: + description: + - The TST system initialization parameter specifies the temporary storage table suffix. + required: false + type: str + udsasze: + description: + - The UDSASZE system initialization parameter specifies the size of the UDSA. + required: false + type: str + uownetql: + description: + - The UOWNETQL system initialization parameter specifies a qualifier for the NETUOWID for units of work + initiated on the local CICS region. + required: false + type: str + usertr: + description: + - The USERTR system initialization parameter specifies whether the main user trace flag is to be set on or + off. + required: false + type: str + choices: + - "ON" + - "OFF" + usrdelay: + description: + - The USRDELAY system initialization parameter specifies the maximum time, in the range 0 - 10080 minutes + (up to seven days), that an eligible user ID and its associated attributes are cached in the CICS region + after use. A user ID that is retained in the user table can be reused. + required: false + type: int + ussconfig: + description: + - The USSCONFIG system initialization parameter specifies the name and path of the root directory for + configuration files on z/OS UNIX. + required: false + type: str + usshome: + description: + - The USSHOME system initialization parameter specifies the name and path of the root directory for + files on z/OS UNIX. + required: false + type: str + vtam: + description: + - The VTAM system initialization parameter specifies whether the z/OS Communications Server access method is + to be used. 
+ required: false + type: str + choices: + - "YES" + - "NO" + vtprefix: + description: + - The VTPREFIX system initialization parameter specifies the first character to be used for the terminal + identifiers (termids) of autoinstalled virtual terminals. + required: false + type: str + webdelay: + description: + - The WEBDELAY system initialization parameter specifies two Web delay periods. + required: false + type: str + wlmhealth: + description: + - The WLMHEALTH system initialization parameter specifies the time interval and the health adjustment value + to be used by CICS® on z/OS® Workload Manager Health API (IWM4HLTH) calls, which CICS makes to inform z/OS + WLM about the health state of a CICS region. + required: false + type: str + wrkarea: + description: + - The WRKAREA system initialization parameter specifies the number of bytes to be allocated to the common + work area (CWA). + required: false + type: int + xappc: + description: + - The XAPPC system initialization parameter specifies whether RACF session security can be used when + establishing APPC sessions. + required: false + type: str + choices: + - "NO" + - "YES" + xcfgroup: + description: + - The XCFGROUP system initialization parameter specifies the name of the cross-system coupling facility + (XCF) group to be joined by this region. + required: false + type: str + xcmd: + description: + - The XCMD system initialization parameter specifies whether you want CICS to perform command security + checking, and optionally the RACF resource class name in which you have defined the command security + profiles. + required: false + type: str + xdb2: + description: + - The XDB2 system initialization parameter specifies whether you want CICS to perform DB2ENTRY security + checking. + required: false + type: str + xdct: + description: + - The XDCT system initialization parameter specifies whether you want CICS to perform resource security + checking for transient data queues. 
+ required: false + type: str + xfct: + description: + - The XFCT system initialization parameter specifies whether you want CICS to perform file resource security + checking, and optionally specifies the RACF resource class name in which you have defined the file + resource security profiles. + required: false + type: str + xhfs: + description: + - The XHFS system initialization parameter specifies whether CICS is to check the transaction user's ability + to access files in the z/OS UNIX System Services file system. + required: false + type: str + choices: + - "YES" + - "NO" + xjct: + description: + - The XJCT system initialization parameter specifies whether you want CICS to perform journal resource + security checking. + required: false + type: str + xlt: + description: + - The XLT system initialization parameter specifies a suffix for the transaction list table. + required: false + type: str + xpct: + description: + - The XPCT system initialization parameter specifies whether you want CICS to perform started transaction + resource security checking, and optionally specifies the name of the RACF resource class name in which you + have defined the started task security profiles. + required: false + type: str + xppt: + description: + - The XPPT system initialization parameter specifies that CICS is to perform application program resource + security checks and optionally specifies the RACF resource class name in which you have defined the + program resource security profiles. + required: false + type: str + xpsb: + description: + - The XPSB system initialization parameter specifies whether you want CICS to perform program specification + block (PSB) security checking and optionally specifies the RACF resource class name in which you have + defined the PSB security profiles. 
+ required: false + type: str + xptkt: + description: + - The XPTKT system initialization parameter specifies whether CICS checks if a user can generate a + PassTicket for the user's userid using the EXEC CICS REQUEST PASSTICKET command, the EXEC CICS REQUEST + ENCRYPTPTKT command, or the EXEC FEPI REQUEST PASSTICKET command. + required: false + type: str + choices: + - "YES" + - "NO" + xres: + description: + - The XRES system initialization parameter specifies whether you want CICS to perform resource security + checking for particular CICS resources and optionally specifies the general resource class name in which + you have defined the resource security profiles. + required: false + type: str + xrf: + description: + - The XRF system initialization parameter specifies whether XRF support is to be included in the CICS + region. + required: false + type: str + choices: + - "NO" + - "YES" + xtran: + description: + - The XTRAN system initialization parameter specifies whether you want CICS to perform transaction security + checking and optionally specifies the RACF resource class name in which you have defined the transaction + security profiles. + required: false + type: str + xtst: + description: + - The XTST system initialization parameter specifies whether you want CICS to perform security checking for + temporary storage queues and optionally specifies + the RACF resource class name in which you have defined the temporary storage security profiles. + required: false + type: str + xuser: + description: + - The XUSER system initialization parameter specifies whether CICS is to perform surrogate user checks. + required: false + type: str + choices: + - "YES" + - "NO" + epcdsasze: + description: + - The EPCDSASZE parameter specifies the size of the EPCDSA dynamic storage area. Message DFHSM0136I at + initialization shows the value that is set. 
+ required: false + type: str + epudsasze: + description: + - The EPUDSASZE parameter specifies the size of the EPUDSA dynamic storage area. Message DFHSM0136I at + initialization shows the value that is set. + required: false + type: str + maxtlslevel: + description: + - The MAXTLSLEVEL system initialization parameter specifies the maximum TLS protocol that CICS uses for + secure TCP/IP connections. + required: false + type: str + choices: + - TLS11 + - TLS12 + - TLS13 + pcdsasze: + description: + - The PCDSASZE parameter specifies the size of the PCDSA dynamic storage area. Message DFHSM0136I at + initialization shows the value that is set. + required: false + type: int + pudsasze: + description: + - The PUDSASZE parameter specifies the size of the PUDSA dynamic storage area. Message DFHSM0136I at + initialization shows the value that is set. + required: false + type: str + sdtmemlimit: + description: + - The SDTMEMLIMIT system initialization parameter specifies a limit to the amount of storage above the bar + that is available for shared data tables to use for control information (entry descriptors, backout + elements, and index nodes). The default is 4 GB. When you set this parameter, check your current setting + for the z/OS MEMLIMIT parameter. + required: false + type: str + zosmoninterval: + description: + - The ZOSMONINTERVAL system initialization parameter specifies the sampling interval, in seconds, for + the CICS® z/OS storage monitor task. + required: false + type: int + zossosnewtcb: + description: + - The ZOSSOSNEWTCB system initialization parameter specifies the action that CICS® takes in response to + a new open TCB that is being attached directly by CICS when the z/OS® user region storage or extended + user region storage is short on storage (SOS). These open TCBs are L8, L9, X8 and X9 TCBs. 
+ required: false + type: str + choices: + - "DELAY" + - "NODELAY" + zossos24unalloc: + description: + - The ZOSSOS24UNALLOC system initialization parameter specifies short-on-storage (SOS) thresholds in KB + for the total amount of unallocated z/OS® user region storage and for the largest contiguous storage + area available in it. + required: false + type: str + zossos31unalloc: + description: + - The ZOSSOS31UNALLOC system initialization parameter specifies short-on-storage (SOS) thresholds in KB + for the total amount of unallocated z/OS® extended user region storage and for the largest contiguous + storage area available in it. + required: false + type: str + zossos64unalloc: + description: + - The ZOSSOS64UNALLOC system initialization parameter specifies a short-on-storage (SOS) threshold in + MB for the amount of unallocated z/OS® MEMLIMIT storage in the 64-bit addressing range. + required: false + type: int +""" diff --git a/plugins/doc_fragments/td_intrapartition.py b/plugins/doc_fragments/td_intrapartition.py new file mode 100644 index 00000000..4a59e6f6 --- /dev/null +++ b/plugins/doc_fragments/td_intrapartition.py @@ -0,0 +1,96 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + + +class ModuleDocFragment(object): + + DOCUMENTATION = r""" +options: + space_primary: + description: + - The size of the primary space allocated to the transient data intrapartition data set. + Note that this is just the value; the unit is specified with O(space_type). + - This option takes effect only when the transient data intrapartition data set is being created. + If the data set already exists, the option has no effect. + type: int + required: false + default: 100 + space_secondary: + description: + - The size of the secondary space allocated to the transient data intrapartition data set. 
+ Note that this is just the value; the unit is specified with O(space_type). + - This option takes effect only when the transient data intrapartition data set is being created. + If the data set already exists, the option has no effect. + type: int + required: false + default: 10 + space_type: + description: + - The unit portion of the transient data intrapartition data set size. Note that this is + just the unit; the value for the primary space is specified with O(space_primary) and the value + for the secondary space is specified with O(space_secondary). + - This option takes effect only when the transient data intrapartition data set is being created. + If the data set already exists, the option has no effect. + - The size can be specified in megabytes (V(M)), kilobytes (V(K)), + records (V(REC)), cylinders (V(CYL)), or tracks (V(TRK)). + required: false + type: str + choices: + - M + - K + - REC + - CYL + - TRK + default: REC + volumes: + description: + - The volume(s) where the data set is created. Use a string to define a singular volume or a list of strings for multiple volumes. + type: raw + required: false + region_data_sets: + description: + - The location of the region data sets to be created by using a template, for example, + C(REGIONS.ABCD0001.<< data_set_name >>). + - If you want to use a data set that already exists, ensure that the data set is a transient data intrapartition data set. + type: dict + required: true + suboptions: + template: + description: + - The base location of the region data sets with a template. + required: false + type: str + dfhintra: + description: + - Overrides the templated location for the transient data intrapartition data set. + required: false + type: dict + suboptions: + dsn: + description: + - The data set name of the transient data intrapartition to override the template. 
+ type: str + required: false + state: + description: + - The intended state for the transient data intrapartition data set, which the module aims to achieve. + - Specify V(absent) to remove the transient data intrapartition data set entirely, if it exists. + - Specify V(initial) to create the transient data intrapartition data set if it does not exist. + If the specified data set exists but is empty, the module leaves the data set as is. + If the specified data set exists and has contents, the module deletes the data set and then creates a new, empty one. + - Specify V(warm) to retain an existing transient data intrapartition data set in its current state. + The module verifies whether the specified data set exists and whether it contains any records. + If both conditions are met, the module leaves the data set as is. + If the data set does not exist or if it is empty, the operation fails. + choices: + - "initial" + - "absent" + - "warm" + required: true + type: str +""" diff --git a/plugins/doc_fragments/transaction_dump.py b/plugins/doc_fragments/transaction_dump.py new file mode 100644 index 00000000..10dfb504 --- /dev/null +++ b/plugins/doc_fragments/transaction_dump.py @@ -0,0 +1,117 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + + +class ModuleDocFragment(object): + + DOCUMENTATION = r""" +options: + space_primary: + description: + - The size of the primary space allocated to the transaction dump data set. + Note that this is just the value; the unit is specified with O(space_type). + - This option takes effect only when the transaction dump data set is being created. + If the data set already exists, the option has no effect. 
+ type: int + required: false + default: 20 + space_secondary: + description: + - The size of the secondary space allocated to the transaction dump data set. + Note that this is just the value; the unit is specified with O(space_type). + - This option takes effect only when the transaction dump data set is being created. + If the data set already exists, the option has no effect. + type: int + required: false + default: 4 + space_type: + description: + - The unit portion of the transaction dump data set size. Note that this is + just the unit; the value for the primary space is specified with O(space_primary) and + the value for the secondary space is specified with O(space_secondary). + - This option takes effect only when the transaction dump data set is being created. + If the data set already exists, the option has no effect. + - The size can be specified in megabytes (V(M)), kilobytes (V(K)), + cylinders (V(CYL)), or tracks (V(TRK)). + required: false + type: str + choices: + - M + - K + - CYL + - TRK + default: M + volumes: + description: + - The volume(s) where the data set is created. Use a string to define a singular volume or a list of strings for multiple volumes. + type: raw + required: false + region_data_sets: + description: + - The location of the region data sets to be created by using a template, for example, + C(REGIONS.ABCD0001.<< data_set_name >>). + type: dict + required: true + suboptions: + template: + description: + - The base location of the region data sets with a template. + required: false + type: str + dfhdmpa: + description: + - Overrides the templated location for the DFHDMPA data set. + required: false + type: dict + suboptions: + dsn: + description: + - The data set name of DFHDMPA to override the template. + type: str + required: false + dfhdmpb: + description: + - Overrides the templated location for the DFHDMPB data set. 
+ required: false + type: dict + suboptions: + dsn: + description: + - The data set name of DFHDMPB to override the template. + type: str + required: false + destination: + description: + - Identifies which one of the transaction dump data sets is the target of the operation. + If the value is left blank, A is implied, but you can specify A or B. + - Specify V(A) to create or delete the A data set. + - Specify V(B) to create or delete the B data set. This MUST be set for the creation of the B data set. + choices: + - "A" + - "B" + type: str + required: false + default: "A" + state: + description: + - The intended state for the transaction dump data set, which the module aims to achieve. + - Specify V(absent) to remove the transaction dump data set entirely, if it exists. + - Specify V(initial) to create the transaction dump data set if it does not exist. + If the specified data set exists but is empty, the module leaves the data set as is. + If the specified data set exists and has contents, the module deletes the data set and then creates a new, empty one. + - Specify V(warm) to retain an existing transaction dump data set in its current state. + The module verifies whether the specified data set exists and whether it contains any records. + If both conditions are met, the module leaves the data set as is. + If the data set does not exist or if it is empty, the operation fails. + choices: + - "initial" + - "absent" + - "warm" + required: true + type: str + """ diff --git a/plugins/module_utils/_aux_temp_storage.py b/plugins/module_utils/_aux_temp_storage.py new file mode 100644 index 00000000..96d02c8c --- /dev/null +++ b/plugins/module_utils/_aux_temp_storage.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +# FOR INTERNAL USE IN THE COLLECTION ONLY.utils. 
def _get_idcams_cmd_temp(data_set):  # type: (dict) -> dict
    """Build the IDCAMS DEFINE parameter sections for an auxiliary
    temporary storage data set.

    Starts from the standard defaults below and overlays any top-level
    section supplied in ``data_set`` (an override replaces the whole
    section, it is not merged key-by-key).
    """
    record_size = "%s %s" % (RECORD_COUNT_DEFAULT, RECORD_SIZE_DEFAULT)
    share_options = "%s %s" % (SHARE_CROSSREGION, SHARE_CROSSSYSTEM)
    merged = {
        "CLUSTER": {
            "RECORDSIZE": record_size,
            "NONINDEXED": None,
            "CONTROLINTERVALSIZE": str(CONTROL_INTERVAL_SIZE_DEFAULT),
            "SHAREOPTIONS": share_options,
        },
        "DATA": {"UNIQUE": None},
    }
    merged.update(data_set)
    return merged


# IDCAMS defaults for the auxiliary temporary storage VSAM cluster.
RECORD_COUNT_DEFAULT = 4089
RECORD_SIZE_DEFAULT = 4089
CONTROL_INTERVAL_SIZE_DEFAULT = 4096
SHARE_CROSSREGION = 2
SHARE_CROSSSYSTEM = 3
def _build_seq_data_set_definition_aux_trace(data_set):  # type: (dict) -> DatasetDefinition
    """Create the DD definition used to allocate a new auxiliary trace
    sequential data set.

    ``data_set`` must supply "name", "primary", "secondary" and "unit";
    "volumes" is optional and may be absent.
    """
    allocation = dict(
        dataset_name=data_set["name"],
        primary=data_set["primary"],
        secondary=data_set["secondary"],
        primary_unit=data_set["unit"],
        secondary_unit=data_set["unit"],
        volumes=data_set.get("volumes"),
        block_size=BLOCK_SIZE_DEFAULT,
        record_length=RECORD_LENGTH_DEFAULT,
        record_format=RECORD_FORMAT,
        disposition=DISPOSITION,
        normal_disposition=NORMAL_DISP,
        conditional_disposition=CONDITION_DISP,
        type=TYPE,
    )
    return DatasetDefinition(**allocation)


# Allocation defaults for the auxiliary trace data set.
BLOCK_SIZE_DEFAULT = 4096
RECORD_LENGTH_DEFAULT = 4096
RECORD_FORMAT = "FB"
TYPE = "SEQ"
DISPOSITION = "NEW"
NORMAL_DISP = "CATALOG"
CONDITION_DISP = "DELETE"
try:
    from zoautil_py.datasets import read
    from zoautil_py.exceptions import ZOAUException
except ImportError as imp_exc:
    ZOAUTIL_IMPORT_ERROR = imp_exc
else:
    ZOAUTIL_IMPORT_ERROR = None


def get_dataset_member_version_record(dataset):  # type: (str) -> str
    """Read member DFH0SINX of ``<dataset>.SDFHSAMP`` and return the CICS
    version string that follows "STATUS = " in its contents.

    Raises Exception if the ZOAU Python API is unavailable, the member
    cannot be read, or the extracted version is blank or implausibly long.
    """
    # Fail fast with the real cause when zoautil_py never imported.
    # Previously this surfaced later as a confusing NameError, because the
    # body referenced the undefined names `read` and `ZOAUException`.
    if ZOAUTIL_IMPORT_ERROR:
        raise Exception(
            "Error reading data set for calculating CICS version - {0}".format(ZOAUTIL_IMPORT_ERROR))
    try:
        content = read("%s.SDFHSAMP(DFH0SINX)" % dataset)
        result = content.split("STATUS = ", 1)[1].split(" ")[0]
        if not result:
            # "STATUS = " was immediately followed by a blank.
            raise Exception("CICS version was blank")
        elif len(result) >= 10:
            # Sanity bound: a CICS version string is short (e.g. "7.2.0").
            raise Exception("CICS version was too long")
        else:
            return result
    except ZOAUException as e:
        raise Exception("Error reading data set for calculating CICS version - {0}".format(e))
def _get_csdup_dds(data_set, data_definition):  # type: (dict, DataDefinition) -> list[DDStatement]
    """Assemble the DD statements needed to run DFHCSDUP against a CSD."""
    return [
        DDStatement('steplib', DatasetDefinition(data_set["sdfhload"], disposition="SHR")),
        DDStatement('dfhcsd', DatasetDefinition(dataset_name=data_set["name"], disposition="SHR")),
        DDStatement('sysprint', StdoutDefinition()),
        DDStatement('sysudump', StdoutDefinition()),
        DDStatement('sysin', data_definition),
    ]


def _run_dfhcsdup(data_set, data_definition):  # type: (dict, DataDefinition) -> list[_execution]
    """Run the DFHCSDUP utility once and record the execution.

    Raises MVSExecutionException when the return code is 8 or higher
    (RC 0 and RC 4 warnings are tolerated).
    """
    executions = []
    dfhcsdup_response = _execute_dfhcsdup(data_set, data_definition)

    executions.append(_execution(
        name="Run DFHCSDUP",
        rc=dfhcsdup_response.rc,
        stdout=dfhcsdup_response.stdout,
        stderr=dfhcsdup_response.stderr))

    if dfhcsdup_response.rc >= 8:
        raise MVSExecutionException(
            "DFHCSDUP failed with RC {0}".format(
                dfhcsdup_response.rc
            ), executions
        )
    return executions


def _execute_dfhcsdup(data_set, data_definition):  # type: (dict, DataDefinition) -> MVSCmdResponse
    """Invoke DFHCSDUP with the standard DD set."""
    return MVSCmd.execute(
        pgm="DFHCSDUP",
        dds=_get_csdup_dds(data_set, data_definition),
        verbose=True,
        debug=False)


def _get_csdup_initilize_cmd():  # type: () -> DataDefinition
    """Return the SYSIN stream for a CSD INITIALIZE.

    (The misspelt name is kept for compatibility with existing callers.)
    """
    return StdinDefinition(content="INITIALIZE")


def _get_idcams_cmd_csd(dataset):  # type: (dict) -> dict
    """Build the IDCAMS DEFINE parameter sections for a CSD.

    Starts from the defaults below, adds optional LOG/LOGSTREAMID cluster
    parameters, then overlays any top-level section supplied in ``dataset``.
    """
    defaults = {
        "CLUSTER": {
            "RECORDSIZE": "{0} {1}".format(RECORD_COUNT_DEFAULT, RECORD_SIZE_DEFAULT),
            "INDEXED": None,
            "KEYS": "{0} {1}".format(KEY_LENGTH, KEY_OFFSET),
            "FREESPACE": "{0} {1}".format(CI_PERCENT, CA_PERCENT),
            "SHAREOPTIONS": str(SHARE_CROSSREGION),
            "REUSE": None
        },
        "DATA": {
            "CONTROLINTERVALSIZE": str(CONTROL_INTERVAL_SIZE_DEFAULT)
        },
        # Present-but-parameterless: forces an INDEX clause to be generated
        # with no extra keywords.  This was previously the *set* literal
        # {None}, which only worked because a non-empty set is truthy; a
        # dict keeps the value type consistent with CLUSTER/DATA while
        # producing identical IDCAMS output.
        "INDEX": {None: None}
    }
    if dataset.get("log") is not None:
        defaults["CLUSTER"]["LOG"] = dataset["log"]
        # LOGSTREAMID is only meaningful with LOG(ALL).  Nested under the
        # log check so a logstream_id supplied without a log setting no
        # longer raises KeyError on dataset["log"].
        if dataset.get("logstream_id") is not None and dataset["log"] == "ALL":
            defaults["CLUSTER"]["LOGSTREAMID"] = dataset["logstream_id"]

    defaults.update(dataset)
    return defaults


# IDCAMS defaults for the CSD VSAM cluster.
RECORD_COUNT_DEFAULT = 200
RECORD_SIZE_DEFAULT = 2000
CONTROL_INTERVAL_SIZE_DEFAULT = 8192
KEY_LENGTH = 22
KEY_OFFSET = 0
CI_PERCENT = 10
CA_PERCENT = 10
SHARE_CROSSREGION = 2
from __future__ import (absolute_import, division, print_function)


__metaclass__ = type

from ansible.module_utils.basic import AnsibleModule
from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._data_set_utils import (
    _build_idcams_define_cmd,
    _run_idcams,
    _run_listds,
    _run_iefbr14
)
from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._icetool import _run_icetool
from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._response import MVSExecutionException
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import BetterArgParser
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.dd_statement import DatasetDefinition

# Option-name constants shared by the region data set modules, plus the
# allowed value lists used in the argument spec below.
SDFHLOAD = "sdfhload"
STATE = "state"
SPACE_PRIMARY = "space_primary"
SPACE_SECONDARY = "space_secondary"
SPACE_TYPE = "space_type"
VOLUMES = "volumes"
KILOBYTES = "K"
MEGABYTES = "M"
RECORDS = "REC"
CYLINDERS = "CYL"
TRACKS = "TRK"
SPACE_OPTIONS = [KILOBYTES, MEGABYTES, CYLINDERS, TRACKS]
ABSENT = "absent"
INITIAL = "initial"
WARM = "warm"
STATE_OPTIONS = [ABSENT, INITIAL, WARM]
CICS_DATA_SETS = "cics_data_sets"
REGION_DATA_SETS = "region_data_sets"
DESTINATION = "destination"
DESTINATION_OPTIONS = ["A", "B"]
DESTINATION_DEFAULT_VALUE = "A"


class DataSet():
    """Base class for Ansible modules that manage one CICS region data set.

    Drives the absent/initial/warm state machine via main(); subclasses
    are expected to fill in name/expected_data_set_organization and
    override create_data_set() (the base implementation is a placeholder).
    """

    def __init__(self, primary, secondary):
        # Identity and target configuration; mostly filled in later by
        # assign_parameters() and by subclasses.
        self.name = ""
        self.target_state = ""
        self.exists = False
        self.data_set_organization = ""
        self.expected_data_set_organization = ""
        self.unit = ""
        self.primary = primary
        self.secondary = secondary
        self.volumes = None
        self.sdfhload = ""
        self.destination = ""

        # Result bookkeeping reported back to Ansible by _exit()/_fail().
        self.changed = False
        self.failed = False
        self.start_state = dict(exists=False, data_set_organization=self.data_set_organization)
        self.end_state = dict(exists=False, data_set_organization=self.data_set_organization)
        self.executions = list()
        self.region_param = dict()
        self.msg = ""

        # Constructing the AnsibleModule parses the task parameters against
        # the arg spec (AnsibleModule itself exits on a spec violation).
        self._module = AnsibleModule(
            argument_spec=self._get_arg_spec(),
        )
        self.process_volume_arg()
        self.validate_parameters()

    def get_result(self):  # type: () -> dict
        """Build the result dict returned to Ansible via exit_json/fail_json."""
        return {
            "changed": self.changed,
            "failed": self.failed,
            "executions": self.executions,
            "start_state": self.start_state,
            "end_state": self.end_state,
            "msg": self.msg,
        }

    def get_data_set(self):  # type: () -> dict
        """Snapshot this object's data set attributes as a plain dict
        (the shape consumed by the module_utils helper functions)."""
        return {
            "name": self.name,
            "state": self.target_state,
            "exists": self.exists,
            "data_set_organization": self.data_set_organization,
            "unit": self.unit,
            "primary": self.primary,
            "secondary": self.secondary,
            "volumes": self.volumes,
            "sdfhload": self.sdfhload,
        }

    def set_start_state(self):  # type: () -> None
        """Record the pre-operation existence/organization for the result."""
        self.start_state = dict(
            exists=self.exists,
            data_set_organization=self.data_set_organization
        )

    def set_end_state(self):  # type: () -> None
        """Record the post-operation existence/organization for the result."""
        self.end_state = dict(
            exists=self.exists,
            data_set_organization=self.data_set_organization
        )

    def _fail(self, msg):  # type: (str) -> None
        """Report failure to Ansible and terminate (fail_json does not return)."""
        self.failed = True
        self.msg = msg
        self.set_end_state()
        self.result = self.get_result()
        self._module.fail_json(**self.result)

    def _exit(self):  # type: () -> None
        """Report success to Ansible and terminate (exit_json does not return)."""
        self.set_end_state()
        self.result = self.get_result()
        self._module.exit_json(**self.result)

    def _get_arg_spec(self):  # type: () -> dict
        """
        Get the arg spec, which is the set of arguments that can be passed into the Ansible module
        """
        return {
            SPACE_PRIMARY: {
                "type": "int",
            },
            SPACE_SECONDARY: {
                "type": "int",
            },
            SPACE_TYPE: {
                "type": "str",
                "choices": SPACE_OPTIONS,
            },
            VOLUMES: {
                "type": "raw"
            },
            STATE: {
                "type": "str",
                "required": True,
                "choices": STATE_OPTIONS
            },
            REGION_DATA_SETS: {
                "type": "dict",
                "required": True,
                "options": {
                    "template": {
                        "type": "str",
                        "required": False,
                    }
                },
            }
        }

    def get_arg_defs(self):  # type: () -> dict
        """
        Get the arg defs, which is a copy of the arg spec, but with certain types changed to the ones used by BetterArgParser
        """
        defs = self._get_arg_spec()
        # CICS_DATA_SETS is only present when a subclass adds it to the spec.
        if defs.get(CICS_DATA_SETS):
            defs[CICS_DATA_SETS]["options"]["sdfhload"].update({
                "arg_type": "data_set_base"
            })
            defs[CICS_DATA_SETS]["options"]["sdfhload"].pop("type")

        # Volumes are validated element-wise as volume serials.
        defs[VOLUMES].pop("type")
        defs[VOLUMES]["arg_type"] = "list"
        defs[VOLUMES]["elements"] = "volume"
        return defs

    def process_volume_arg(self):
        """
        Ensure Volumes is a string or list of strings
        """
        if self._module.params.get(VOLUMES):
            volumes_param = self._module.params[VOLUMES]
            # A single space-separated string becomes a list of volsers.
            if isinstance(volumes_param, str):
                self._module.params[VOLUMES] = volumes_param.split()

    def validate_parameters(self):  # type: () -> None
        """
        Use BetterArgParser to parse the parameters passed in, which also does some validation
        """
        try:
            params = BetterArgParser(self.get_arg_defs()).parse_args(self._module.params)
        except ValueError as e:
            self._fail(str(e))
        self.assign_parameters(params)

    def assign_parameters(self, params):  # type: (dict) -> None
        """
        Assign parameters to the relevant fields
        """
        # Mandatory parameters
        self.target_state = params[STATE]
        self.region_param = params[REGION_DATA_SETS]

        # Optional parameters
        if params.get(SPACE_PRIMARY):
            self.primary = params[SPACE_PRIMARY]
        if params.get(SPACE_SECONDARY):
            self.secondary = params[SPACE_SECONDARY]
        if params.get(SPACE_TYPE):
            self.unit = params[SPACE_TYPE]
        if params.get(CICS_DATA_SETS) and params.get(CICS_DATA_SETS).get("sdfhload"):
            self.sdfhload = params[CICS_DATA_SETS]["sdfhload"].upper()
        if params.get(VOLUMES):
            self.volumes = params[VOLUMES]
        if params.get(DESTINATION):
            self.destination = params[DESTINATION]

    def create_data_set(self):  # type: () -> None
        """Placeholder creation step; subclasses override this with the real
        IDCAMS/IEFBR14 build.  (The command built here is discarded.)"""
        _build_idcams_define_cmd({})

    def build_vsam_data_set(self, create_cmd):  # type: (str) -> None
        """Create a VSAM data set with the given IDCAMS DEFINE command,
        recording the executions; fails the module on an IDCAMS error."""
        try:
            message = "Creating {0} data set".format(self.name)
            idcams_executions = _run_idcams(
                cmd=create_cmd,
                name=message,
                location=self.name,
                delete=False)
            self.executions.extend(idcams_executions)

            self.changed = True
        except MVSExecutionException as e:
            self.executions.extend(e.executions)
            self._fail(e.message)

    def build_seq_data_set(self, ddname, definition):  # type: (str, DatasetDefinition) -> None
        """Create a sequential data set by allocating it via IEFBR14;
        fails the module on an allocation error."""
        try:
            iefbr14_executions = _run_iefbr14(ddname, definition)
            self.executions.extend(iefbr14_executions)

            self.changed = True
        except MVSExecutionException as e:
            self.executions.extend(e.executions)
            self._fail(e.message)

    def delete_data_set(self):  # type: () -> None
        """Delete the data set with IDCAMS if it exists; no-op otherwise."""
        if self.exists:
            delete_cmd = '''
            DELETE {0}
            '''.format(self.name)

            try:
                idcams_executions = _run_idcams(
                    cmd=delete_cmd,
                    name=self.name,
                    location=self.name,
                    delete=True)
                self.executions.extend(idcams_executions)
                self.changed = True
            except MVSExecutionException as e:
                self.executions.extend(e.executions)
                self._fail(e.message)

    def init_data_set(self):  # type: () -> None
        """Reach the 'initial' state: create the data set if absent; if it
        exists with records, delete and recreate it; if it exists empty,
        leave it alone."""
        if self.exists:
            try:
                icetool_executions, record_count = _run_icetool(self.name)
                self.executions.extend(icetool_executions)
                if record_count > 0:
                    self.delete_data_set()
                    self.update_data_set_state()
                    self.create_data_set()

            except MVSExecutionException as e:
                self.executions.extend(e.executions)
                self._fail(e.message)
        else:
            self.create_data_set()

    def warm_data_set(self):  # type: () -> None
        """Reach the 'warm' state: the data set must already exist."""
        if not self.exists:
            self._fail("Data set {0} does not exist.".format(self.name))

    def warm_with_records(self):
        """Stricter warm check used by some subclasses: the data set must
        exist AND contain at least one record."""
        if self.exists:
            try:
                icetool_executions, record_count = _run_icetool(self.name)
                self.executions.extend(icetool_executions)
                if record_count <= 0:
                    self._fail("Data set {0} is empty.".format(self.name))
            except MVSExecutionException as e:
                self.executions.extend(e.executions)
                self._fail(e.message)
        else:
            self._fail("Data set {0} does not exist.".format(self.name))

    def invalid_target_state(self):  # type: () -> None
        """Fail the module for a target state outside STATE_OPTIONS."""
        self._fail("{0} is not a valid target state.".format(
            self.target_state))

    def execute_target_state(self):  # type: () -> None
        """Dispatch to the handler for the requested target state."""
        if self.target_state == ABSENT:
            self.delete_data_set()
        elif self.target_state == INITIAL:
            self.init_data_set()
        elif self.target_state == WARM:
            self.warm_data_set()
        else:
            self.invalid_target_state()

    def update_data_set_state(self):  # type: () -> None
        """Refresh self.exists / self.data_set_organization via LISTDS."""
        try:
            listds_executions, self.exists, self.data_set_organization = _run_listds(self.name)

            self.executions.extend(listds_executions)
        except MVSExecutionException as e:
            self.executions.extend(e.executions)
            self._fail(e.message)

    def main(self):  # type: () -> None
        """Entry point: capture start state, verify the organization of an
        existing data set, apply the target state, then exit with the result."""
        self.update_data_set_state()
        self.set_start_state()

        if self.exists and (self.data_set_organization != self.expected_data_set_organization):
            self._fail(
                "Data set {0} is not in expected format {1}.".format(
                    self.name, self.expected_data_set_organization))

        self.execute_target_state()

        self.update_data_set_state()

        self._exit()
+ +from __future__ import (absolute_import, division, print_function) + +__metaclass__ = type +import re +import tempfile +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.ansible_module import AnsibleModuleHelper +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._response import _execution, MVSExecutionException +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.zos_mvs_raw import MVSCmd +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.dd_statement import DDStatement, StdoutDefinition, DatasetDefinition, StdinDefinition + +MVS_CMD_RETRY_ATTEMPTS = 10 + + +DSORG = { + "PS": "Sequential", + "PO": "Partitioned", + "IS": "Indexed Sequential", + "DA": "Direct Access", + "VSAM": "VSAM", + "??": "Other" +} + + +def _run_idcams(cmd, name, location, delete=False): # type: (str, str, str, bool) -> list[dict[str, str| int]] + executions = [] + + for x in range(MVS_CMD_RETRY_ATTEMPTS): + idcams_response = _execute_idcams(cmd=cmd) + executions.append( + _execution( + name="IDCAMS - {0} - Run {1}".format( + name, + x + 1), + rc=idcams_response.rc, + stdout=idcams_response.stdout, + stderr=idcams_response.stderr)) + if location.upper() in idcams_response.stdout.upper(): + break + + if location.upper() not in idcams_response.stdout.upper(): + raise MVSExecutionException("IDCAMS Command output not recognised", executions) + + if delete: + pattern = r"^.+ENTRY\(A|C|D|I\){0}DELETED+$".format(location.upper()) + if idcams_response.rc == 8 and "ENTRY{0}NOTFOUND".format( + location.upper()) in idcams_response.stdout.upper().replace( + " ", + "").replace( + "\n", + ""): + return executions + elif idcams_response.rc != 0 or not bool(re.search(pattern, idcams_response.stdout.upper().replace( + " ", + "").replace( + "\n", + ""))): + raise MVSExecutionException("RC {0} when deleting data set".format(idcams_response.rc), executions) + else: + if idcams_response.rc == 12 and "NOTDEFINEDBECAUSEDUPLICATENAMEEXISTSINCATALOG" in 
idcams_response.stdout.upper( + ).replace(" ", "").replace("\n", ""): + return executions + if idcams_response.rc != 0: + raise MVSExecutionException("RC {0} when creating data set".format(idcams_response.rc), executions) + + return executions + + +def _get_idcams_dds(cmd): + return [ + DDStatement('sysin', StdinDefinition(content=cmd)), + DDStatement('sysprint', StdoutDefinition()), + ] + + +def _execute_idcams(cmd): + return MVSCmd.execute_authorized( + pgm="IDCAMS", + dds=_get_idcams_dds(cmd), + verbose=True, + debug=False + ) + + +def _get_listds_dds(cmd): + return [ + DDStatement('systsin', StdinDefinition(content=cmd)), + DDStatement('systsprt', StdoutDefinition()), + ] + + +def _execute_listds(cmd): + return MVSCmd.execute_authorized( + pgm="IKJEFT01", + dds=_get_listds_dds(cmd), + verbose=True, + debug=False + ) + + +def _get_dataset_size_unit(unit_symbol): # type: (str) -> str + return { + "M": "MEGABYTES", + "K": "KILOBYTES", + "CYL": "CYLINDERS", + "REC": "RECORDS", + "TRK": "TRACKS" + }.get(unit_symbol, "MEGABYTES") + + +def _build_idcams_define_cmd(dataset): # type: (dict) -> str + defineStr = "\n DEFINE{0}{1}{2}\n ".format( + _build_idcams_define_cluster_parms(dataset), + _build_idcams_define_data_parms(dataset), + _build_idcams_define_index_parms(dataset)) + return defineStr + + +def _build_idcams_define_cluster_parms(dataset): # type: (dict) -> str + if dataset.get("volumes"): + volumes_cmd = _build_idcams_volumes(dataset["volumes"]) + else: + volumes_cmd = "" + + clusterStr = " CLUSTER (NAME({0}) -\n {1}({2} {3}){4}{5})".format( + dataset["name"], + _get_dataset_size_unit(dataset["unit"]), + dataset["primary"], + dataset["secondary"], + _build_idcams_define_parms(dataset, "CLUSTER"), + volumes_cmd) + return clusterStr + + +def _build_idcams_define_data_parms(dataset): # type: (dict) -> str + dataStr = " -\n DATA (NAME({0}.DATA){1})".format( + dataset["name"], + _build_idcams_define_parms(dataset, "DATA")) + return dataStr + + +def 
_build_idcams_define_index_parms(dataset): # type: (dict) -> str + if dataset.get("INDEX", None): + indexStr = " -\n INDEX (NAME({0}.INDEX){1})".format( + dataset["name"], + _build_idcams_define_parms(dataset, "INDEX")) + else: + indexStr = "" + return indexStr + + +def _build_idcams_define_parms(dataset, parm): # type: (dict, str) -> str + parmsStr = "" + if isinstance(dataset[parm], dict): + for key, value in dataset[parm].items(): + if value is not None: + parmsStr += " -\n {0}({1})".format(key, value) + elif key is not None: + parmsStr += " -\n {0}".format(key) + return parmsStr + + +def _build_idcams_volumes(volumes): # type: (list[str]) -> str + volumes_cmd = "" + if len(volumes) > 1: + for vol in volumes: + volumes_cmd += (vol + " ") + else: + volumes_cmd = volumes[0] + return " -\n VOLUMES({0})".format(volumes_cmd.rstrip()) + + +def _get_data_set_type(listds_stdout): + data_set_type = "" + matches = re.findall(r"\s+(PS|PO|IS|DA|VSAM|\?\?)\s+", listds_stdout) + + if (len(matches) != 0): + try: + data_set_type = DSORG[matches[0]] + except KeyError: + data_set_type = "Unspecified" + else: + data_set_type = "Unspecified" + return data_set_type + + +def _run_listds(location): # type: (str) -> tuple[list[_execution], bool, str] + cmd = " LISTDS '{0}'".format(location) + executions = [] + + for x in range(MVS_CMD_RETRY_ATTEMPTS): + listds_response = _execute_listds(cmd=cmd) + executions.append( + _execution( + name="IKJEFT01 - Get Data Set Status - Run {0}".format( + x + 1), + rc=listds_response.rc, + stdout=listds_response.stdout, + stderr=listds_response.stderr)) + if location.upper() in listds_response.stdout.upper(): + break + + if location.upper() not in listds_response.stdout.upper(): + raise MVSExecutionException("LISTDS Command output not recognised", executions) + + # DS Name in output, good output + + if listds_response.rc == 8 and "NOT IN CATALOG" in listds_response.stdout: + return executions, False, "NONE" + + if listds_response.rc == 4 and "MEMBER 
NAME NOT FOUND" in listds_response.stdout: + return executions, False, "NONE" + + # Exists + + if listds_response.rc != 0: + raise MVSExecutionException("RC {0} running LISTDS Command".format(listds_response.rc), executions) + + # Exists, RC 0 + data_set_organization = _get_data_set_type(listds_response.stdout) + + return executions, True, data_set_organization + + +def _run_iefbr14(ddname, definition): # type: (str, DatasetDefinition) -> list[dict[str, str| int]] + + executions = [] + + for x in range(MVS_CMD_RETRY_ATTEMPTS): + iefbr14_response = _execute_iefbr14(ddname, definition) + executions.append( + _execution( + name="IEFBR14 - {0} - Run {1}".format( + ddname, + x + 1), + rc=iefbr14_response.rc, + stdout=iefbr14_response.stdout, + stderr=iefbr14_response.stderr)) + if iefbr14_response.stdout != "" or iefbr14_response.stderr != "": + break + + if iefbr14_response.stdout == "" and iefbr14_response.stderr == "": + raise MVSExecutionException("IEFBR14 Command output not recognised", executions) + + if iefbr14_response.rc != 0: + raise MVSExecutionException( + "RC {0} when creating sequential data set".format( + iefbr14_response.rc), executions) + + return executions + + +def _get_iefbr14_dds(ddname, definition): # type: (str, DatasetDefinition) -> list[DDStatement] + return [DDStatement(ddname, definition)] + + +def _execute_iefbr14(ddname, definition): + return MVSCmd.execute( + pgm="IEFBR14", + dds=_get_iefbr14_dds(ddname, definition), + verbose=True, + debug=False + ) + + +def _execute_command(command): + module = AnsibleModuleHelper(argument_spec={}) + return module.run_command(command) + + +def _read_data_set_content(data_set_name): + executions = [] + command = "dcat '{0}'".format(data_set_name) + + rc, stdout, stderr = _execute_command(command) + executions.append( + _execution( + name="Read data set {0}".format(data_set_name), + rc=rc, + stdout=stdout, + stderr=stderr)) + if rc != 0: + raise MVSExecutionException( + "RC {0} when reading content from 
data set {1}".format( + rc, data_set_name), executions) + return executions, stdout + + +def _write_jcl_to_data_set(jcl, data_set_name): + """Writes generated JCL content to the specified data set + """ + executions = [] + + temp = tempfile.NamedTemporaryFile(delete=True) + with open(temp.name, "w") as f: + f.write(jcl) + rc, stdout, stderr = _execute_command("cp -O u {0} \"//'{1}'\"".format(temp.name, data_set_name)) + executions.append( + _execution( + name="Copy JCL contents to data set", + rc=rc, + stdout=stdout, + stderr=stderr)) + if rc != 0: + raise MVSExecutionException("Failed to copy JCL content to data set", executions) + return executions diff --git a/plugins/module_utils/_global_catalog.py b/plugins/module_utils/_global_catalog.py new file mode 100644 index 00000000..a05ee3e7 --- /dev/null +++ b/plugins/module_utils/_global_catalog.py @@ -0,0 +1,158 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +# FOR INTERNAL USE IN THE COLLECTION ONLY. 
+ +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + + +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.zos_mvs_raw import MVSCmd, MVSCmdResponse +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.dd_statement import StdoutDefinition, DatasetDefinition, DDStatement, InputDefinition +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._response import MVSExecutionException, _execution +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._data_set_utils import MVS_CMD_RETRY_ATTEMPTS + + +def _get_value_from_line(line): # type: (list[str]) -> str | None + val = None + if len(line) == 1: + val = line[0].split(":")[1] + return val + + +def _get_filtered_list(elements, target): # type: (list[str],str) -> list[str] + return list(filter(lambda x: target in x, elements)) + + +def _get_rmutl_dds( + location, + sdfhload, + cmd): # type: (str, str, str) -> list[DDStatement] + return [ + DDStatement('steplib', DatasetDefinition(sdfhload)), + DDStatement('dfhgcd', DatasetDefinition(location)), + DDStatement('sysin', InputDefinition(content=cmd)), + DDStatement('sysprint', StdoutDefinition()), + ] + + +def _get_reason_code(stdout_lines_arr): # type: (list[str]) -> str | None + if len(stdout_lines_arr) == 0: + return None + + stdout_comma_sep = list(stdout_lines_arr[0].split(",")) + filtered_for_reason_code = list( + filter(lambda x: "REASON:X" in x, stdout_comma_sep)) + if len(filtered_for_reason_code) == 0: + return None + + reason_code = [element.replace("0", "") + for element in filtered_for_reason_code[0].split("'")] + return reason_code[1] + + +def _get_catalog_records(stdout): # type: (str) -> tuple[str | None, str | None] + elements = ['{0}'.format(element.replace(" ", "").upper()) + for element in stdout.split("\n")] + + autostart_filtered = _get_filtered_list( + elements, "AUTO-STARTOVERRIDE:") + nextstart_filtered = _get_filtered_list(elements, "NEXTSTARTTYPE:") + + 
autostart_override = _get_value_from_line( + autostart_filtered) + nextstart = _get_value_from_line(nextstart_filtered) + + return (autostart_override, nextstart) + + +def _run_dfhrmutl( + location, # type: str + sdfhload, # type: str + cmd="" # type: str +): + # type: (...) -> tuple[list[dict[str, str| int]], tuple[str | None, str | None]] | list[dict[str, str| int]] + + executions = [] + + for x in range(MVS_CMD_RETRY_ATTEMPTS): + dfhrmutl_response = _execute_dfhrmutl(location, sdfhload, cmd) + executions.append( + _execution( + name="DFHRMUTL - {0} - Run {1}".format( + "Get current catalog" if cmd == "" else "Updating autostart override", + x + 1), + rc=dfhrmutl_response.rc, + stdout=dfhrmutl_response.stdout, + stderr=dfhrmutl_response.stderr)) + + if dfhrmutl_response.rc == 0: + break + if dfhrmutl_response.rc == 16: + formatted_stdout_lines = [ + "{0}".format(element.replace(" ", "").upper()) + for element in dfhrmutl_response.stdout.split("\n") + ] + stdout_with_rc = list(filter(lambda x: "REASON:X" in x, formatted_stdout_lines)) + + reason_code = _get_reason_code(stdout_with_rc) + if reason_code and reason_code != "A8": + raise MVSExecutionException( + "DFHRMUTL failed with RC 16 - {0}".format(stdout_with_rc[0]), executions + ) + elif reason_code is None: + raise MVSExecutionException( + "DFHRMUTL failed with RC 16 but no reason code was found", + executions, + ) + + else: + raise MVSExecutionException( + "DFHRMUTL failed with RC {0}".format( + dfhrmutl_response.rc), executions) + + if cmd != "": + return executions + + return executions, _get_catalog_records(dfhrmutl_response.stdout) + + +def _execute_dfhrmutl(location, sdfhload, cmd=""): # type: (str, str, str) -> MVSCmdResponse + return MVSCmd.execute( + pgm="DFHRMUTL", + dds=_get_rmutl_dds(location=location, sdfhload=sdfhload, cmd=cmd), + verbose=True, + debug=False) + + +def _get_idcams_cmd_gcd(dataset): # type: (dict) -> dict + defaults = { + "CLUSTER": { + "RECORDSIZE": "{0} 
{1}".format(RECORD_COUNT_DEFAULT, RECORD_SIZE_DEFAULT), + "INDEXED": None, + "KEYS": "{0} {1}".format(KEY_LENGTH, KEY_OFFSET), + "FREESPACE": "{0} {1}".format(CI_PERCENT, CA_PERCENT), + "SHAREOPTIONS": str(SHARE_CROSSREGION), + "REUSE": None + }, + "DATA": { + "CONTROLINTERVALSIZE": str(CONTROL_INTERVAL_SIZE_DEFAULT) + }, + "INDEX": { + None + } + } + defaults.update(dataset) + return defaults + + +RECORD_COUNT_DEFAULT = 4089 +RECORD_SIZE_DEFAULT = 32760 +CONTROL_INTERVAL_SIZE_DEFAULT = 32768 +KEY_LENGTH = 52 +KEY_OFFSET = 0 +CI_PERCENT = 10 +CA_PERCENT = 10 +SHARE_CROSSREGION = 2 diff --git a/plugins/module_utils/_icetool.py b/plugins/module_utils/_icetool.py new file mode 100644 index 00000000..c39ec0ee --- /dev/null +++ b/plugins/module_utils/_icetool.py @@ -0,0 +1,93 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +# FOR INTERNAL USE IN THE COLLECTION ONLY. + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.zos_mvs_raw import MVSCmd, MVSCmdResponse +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.dd_statement import StdoutDefinition, DatasetDefinition, DDStatement, InputDefinition +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._response import _execution, MVSExecutionException +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._data_set_utils import MVS_CMD_RETRY_ATTEMPTS + + +def _get_icetool_dds(location): # type: (str) -> list[DDStatement] + return [ + DDStatement('sysprint', StdoutDefinition()), + DDStatement('dd1', DatasetDefinition(dataset_name=location, disposition="SHR")), + DDStatement('toolmsg', StdoutDefinition()), + DDStatement('dfsmsg', StdoutDefinition()), + DDStatement('showdef', StdoutDefinition()), + DDStatement('toolin', InputDefinition(content="COUNT FROM(DD1)")), + ] + + +def 
_get_reason_code(filtered): # type: (list[str]) -> str + if len(filtered) == 0: + return "" + + elements2 = list(filtered[0].split(',')) + filtered2 = list(filter(lambda x: "REASON:X" in x, elements2)) + if len(filtered2) == 0: + return "" + + elements3 = [element.replace("0", "") + for element in filtered2[0].split("'")] + return elements3[1] + + +def _get_record_count(stdout): # type: (str) -> int + record_count = -1 + elements = ['{0}'.format(element.replace(" ", "").upper()) + for element in stdout.split("\n")] + + lines = list(filter(lambda x: "RECORDCOUNT:" in x, elements)) + if len(lines) > 0: + record_count = int(lines[0].split(":")[1]) + + return record_count + + +def _run_icetool(location): # type: (str) -> tuple[list[_execution], int] + executions = [] + + for x in range(MVS_CMD_RETRY_ATTEMPTS): + icetool_response = _execute_icetool(location) + + executions.append( + _execution( + name="ICETOOL - Get record count - Run {0}".format(x + 1), + rc=icetool_response.rc, + stdout=icetool_response.stdout, + stderr=icetool_response.stderr)) + + if icetool_response.rc != 0: + elements = ["{0}".format(element.replace(" ", "").upper()) + for element in icetool_response.stdout.split("\n")] + filtered = list(filter(lambda x: "REASON:X" in x, elements)) + + reason_code = _get_reason_code(filtered) + if reason_code != "": + raise MVSExecutionException( + "ICETOOL failed with RC {0} - {1}".format(icetool_response.rc, filtered[0]), executions) + else: + raise MVSExecutionException( + "ICETOOL failed with RC {0}".format(icetool_response.rc), executions) + elif icetool_response.stdout != "": + break + + if (icetool_response.stdout == "") and (icetool_response.stderr == ""): + raise MVSExecutionException("ICETOOL Command output not recognised", executions) + + return executions, _get_record_count(icetool_response.stdout) + + +def _execute_icetool(location): # type: (str) -> MVSCmdResponse + return MVSCmd.execute( + pgm="ICETOOL", + dds=_get_icetool_dds(location=location), + 
verbose=True, + debug=False) diff --git a/plugins/module_utils/_jcl_helper.py b/plugins/module_utils/_jcl_helper.py new file mode 100644 index 00000000..5b1e0f07 --- /dev/null +++ b/plugins/module_utils/_jcl_helper.py @@ -0,0 +1,377 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +# FOR INTERNAL USE IN THE COLLECTION ONLY. + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type +import re + +ACCOUNTING_INFORMATION = 'accounting_information' +CONTENT = 'content' +CONCAT_JCL_PREFIX = '// ' +DD = 'DD' +DD_INSTREAM = 'DD *' +DDS = 'dds' +DLM = 'dlm' +END_INSTREAM = '/*' +EXEC = 'EXEC' +EXECS = 'execs' +GMTEXT = 'GMTEXT' +JCL_PREFIX = '//' +JOB = 'JOB' +JOB_NAME = 'job_name' +JOB_CARD = 'job_card' +MAX_LINE_LENGTH = 72 +MSGLEVEL = 'msglevel' +NAME = 'name' +NO_DD_NAME = ' ' +PROGRAMMER_NAME = 'programmer_name' + + +class JCLHelper: + def __init__(self): + self.job_data = {JOB_CARD: {}, + EXECS: [], + } + self.jcl = [] + + def render_jcl(self): + """Renders the JCL from the JCLHelper.job_data structure. Appends all JCL to the JCL parameter of this class. + """ + self._write_job_statement(self.job_data[JOB_CARD]) + self._write_exec_statements(self.job_data[EXECS]) + self._write_null_statement() + + def _write_job_statement(self, job_parameters): + job_statement = JCLHelper._build_job_statement(job_parameters) + self._write_list_of_strings( + JCLHelper._split_long_dd_statement(job_statement)) + + def _write_dds(self, dds): + """Writes dd statements to the JCL. + + Parameters + ---------- + dds : list + List of dictionaries, containing the dd information. 
+ Example Syntax: + [{"DDNAME1": [{"dsn": DATA.NAME", "disp": "SHR"}], + {"DDNAME2": [{"content": ["INSTREAM DATA", "TO WRITE"]}], + {"DDNAME3" : [{"dsn": "DATA.NAME.1"}, {"dsn" : "DATA.NAME.2"}]}] + """ + for dd_card in dds: + for dd_name, data in dd_card.items(): + if isinstance(data, dict): + # Intream data isn't stored in a list of dicts, it's stored in a dict. + if data.get(CONTENT): + self._write_instream_data(dd_name, data) + elif len(data) == 1: + self._write_dd_statement(dd_name, data[0]) + else: + self._write_dd_concatenation(dd_name, data) + + def _write_exec_statements(self, list_of_exec_dicts): + """Builds and writes an EXEC Statement, with any additional parameters, and writes the corresponding + dd statements. + Parameters + ---------- + list_of_exec_dicts : list + List of dictionaries with all the exec parameters, and a dictionary of dd statements. + """ + dd_dict = None + for exec_statement in list_of_exec_dicts: + if exec_statement.get(DDS) or exec_statement.get(DDS) is not {}: + dd_dict = exec_statement.pop(DDS) + exec_statement_string = JCLHelper._build_exec_statement_string(exec_statement) + + self._write_list_of_strings( + JCLHelper._split_long_dd_statement(exec_statement_string)) + if dd_dict: + self._write_dds(dd_dict) + + @staticmethod + def _build_exec_statement_string(exec_dict): + step_name = JCLHelper._format_dd_name(exec_dict.pop(NAME)) + exec_string = '{0}{1}{2}'.format(JCL_PREFIX, step_name, EXEC) + parameters = JCLHelper._concatenate_key_value_pairs_into_list(exec_dict) + if parameters: + return JCLHelper._add_parameters_onto_dd_statement(exec_string, parameters, False) + else: + return exec_string + + def _write_list_of_strings(self, jcl_lines): + """Writes a list of strings to the JCL List. + + Parameters + ---------- + jcl_lines : list + The lines of JCL you want to append to your JCL string. 
+ """ + if isinstance(jcl_lines, list): + self.jcl.extend(jcl_lines) + else: + self.jcl.append(jcl_lines) + + def _write_instream_data(self, dd_name, data): + """Writes instream data to a DD Card and adds to JCL List. + + Parameters + ---------- + dd_name : str + The name of the DD Card + data : dict + The lines of data you want appended to the JCL, and passed into the DD card. + """ + + # Write the opening line of an instream data statement + formatted_dd_name = JCLHelper._format_dd_name(dd_name) + dd_line = (JCL_PREFIX + formatted_dd_name + DD_INSTREAM) + content_for_instream = data.pop(CONTENT) + parameters = JCLHelper._concatenate_key_value_pairs_into_list(data) + dd_line = JCLHelper._add_parameters_onto_dd_statement( + dd_line, parameters, True) + self.jcl.append(dd_line) + + # Write the instream data + self._write_list_of_strings(content_for_instream) + # Apply different delimiter if necessary, or just the default + self.jcl.append(data.pop(DLM, END_INSTREAM)) + + def _write_dd_statement(self, dd_name, additional_parameters): + """Writes and builds a DD statement to the JCL List + + Parameters + ---------- + dd_name : str + The name of the DD Card + additional_parameters: dict + A dict of key value pairs, E.g. {'PARM1':'one','PARM2':'two'} + """ + + dd_statement = self._build_dd_statement(dd_name, additional_parameters) + if dd_statement: + self._write_list_of_strings( + JCLHelper._split_long_dd_statement(dd_statement)) + + def _write_dd_concatenation(self, dd_name, additional_parameters): + """Writes multiple data sets to a DD name and adds to JCL List + + Parameters + ---------- + dd_name : str + The name of the DD Card + additional_parameters: dict + A dict of key value pairs, E.g. 
{'PARM1':'one','PARM2':'two'} + """ + + dd_strings = self._build_dd_concatenation_list( + dd_name, additional_parameters) + self._write_list_of_strings(JCLHelper._split_long_dd_statement_list(dd_strings)) + + def _write_null_statement(self): + self.jcl.append(JCL_PREFIX) + + @staticmethod + def _build_job_statement(job_parameters): + positional_parameters = JCLHelper._format_job_positional_parameters(job_parameters) + if job_parameters.get(MSGLEVEL): + job_parameters[MSGLEVEL] = JCLHelper._format_msglevel_parameter(job_parameters[MSGLEVEL]) + # Put key values equal to one another + job_name = job_parameters.pop(JOB_NAME) + list_of_additional_parameters = JCLHelper._concatenate_key_value_pairs_into_list(job_parameters) + + job_string = '{0}{1}{2}'.format(JCL_PREFIX, JCLHelper._format_dd_name(job_name), JOB) + if positional_parameters: + job_string = '{0} {1}'.format(job_string, positional_parameters) + return JCLHelper._add_parameters_onto_dd_statement(job_string, list_of_additional_parameters, True) + return JCLHelper._add_parameters_onto_dd_statement(job_string, list_of_additional_parameters, False) + + @staticmethod + def _format_job_positional_parameters(job_parameters): + if job_parameters: + accounting_info = JCLHelper._format_accounting_information(job_parameters.pop(ACCOUNTING_INFORMATION, None)) + programmer_name = JCLHelper._format_programmer_name(job_parameters.pop(PROGRAMMER_NAME, None)) + if programmer_name: + return "{0},{1}".format(accounting_info, programmer_name) + elif accounting_info or accounting_info != "": + return accounting_info + + @staticmethod + def _format_accounting_information(acc_information): + if acc_information: + # Putting into a list as keys need to be a specific order + acc_values = [acc_information.get("pano"), acc_information.get("room"), acc_information.get("times"), + acc_information.get("lines"), acc_information.get("cards"), acc_information.get("forms"), + acc_information.get("copies"), acc_information.get("log"), 
acc_information.get("linect")] + result = "" + amount_of_values_added = 0 + for value in acc_values: + if value: + amount_of_values_added += 1 + result += str(value) + # Needs a comma appending as we need to either seperate values or indicate missing keys. + result += ',' + result = result.rstrip(",") + if amount_of_values_added < 2: + return result + return "({0})".format(result) + # Return an empty string so that if programmer name is set, we can handle the formatting nice and tidy. + return "" + + @staticmethod + def _format_programmer_name(programmer_name): + if programmer_name: + formatted_string = "" + for char in programmer_name: + if char == "'": + formatted_string += "''" # Duplicate the apostrophe + else: + formatted_string += char + return "'{0}'".format(formatted_string) + + @staticmethod + def _format_msglevel_parameter(msglevel_dict): + msglevel_var = msglevel_dict.get("statements", "") + messages = msglevel_dict.get("messages") + if messages is not None: + msglevel_var = "({0},{1})".format(msglevel_var, messages) + return msglevel_var + + def _build_dd_concatenation_list(self, dd_name, list_of_dicts): + # Get the dictionary in the list, to append a DD name. + concatenation_of_statements = [ + self._build_dd_statement(dd_name, list_of_dicts[0])] + + # For the rest of the dd's, no DD name needed. + for parameter_dict in list_of_dicts[1:]: + parameters_string = JCLHelper._build_parameter_string( + JCLHelper._concatenate_key_value_pairs_into_list(parameter_dict)) + current_line = '{0}{1} {2}'.format( + CONCAT_JCL_PREFIX, DD, parameters_string) + concatenation_of_statements.append(current_line) + return concatenation_of_statements + + @staticmethod + def _build_dd_statement(dd_name, additional_parameters=None): + """Builds a DD Statement string from a DD Card name, and any additional parameters to follow + + Parameters + ---------- + dd_name : str + Name of the DD Card + additional_parameters : dict + A dict of key value pairs, E.g. 
{'PARM1':'one','PARM2':'two'} + + Returns + ------- + str + The built DD Statement from the name and any parameters. + """ + if dd_name is None: + return None + dd_name = JCLHelper._format_dd_name(dd_name) + parameters = JCLHelper._concatenate_key_value_pairs_into_list( + additional_parameters) + parameters_string = JCLHelper._build_parameter_string(parameters) + dd_statement = '{0}{1}{2} {3}'.format( + JCL_PREFIX, dd_name, DD, parameters_string) + return dd_statement + + @staticmethod + def _exceeds_line_length(dd_statement): + return len(dd_statement) > MAX_LINE_LENGTH + + @staticmethod + def _split_long_dd_statement_list(dd_statement_list): + split_statement = [] + if isinstance(dd_statement_list, str): + return JCLHelper._split_long_dd_statement(dd_statement_list) + + for statement in dd_statement_list: + result = JCLHelper._split_long_dd_statement(statement) + if isinstance(result, list): + split_statement.extend(result) + else: + split_statement.append(result) + return split_statement + + @staticmethod + def _split_long_dd_statement(dd_statement): + split_statement = [] + if JCLHelper._exceeds_line_length(dd_statement): + words_in_statement = iter(dd_statement.split(",")) + current = next(words_in_statement) + for word in words_in_statement: + if len(current) + 2 + len(word) > MAX_LINE_LENGTH: + split_statement.append(current + ',') + current = JCL_PREFIX + NO_DD_NAME + word + else: + current = '{0}{1}{2}'.format(current, ',', word) + split_statement.append(current) + else: + split_statement.append(dd_statement) + return split_statement + + @staticmethod + def _add_parameters_onto_dd_statement(existing_dd_line, parameter_list, comma_prefix): + if parameter_list: + parameter_string = ','.join(parameter_list) + joiner = "," if comma_prefix is True else " " + existing_dd_line = '{0}{1}{2}'.format( + existing_dd_line, joiner, parameter_string) + return existing_dd_line + + @staticmethod + def _format_dd_name(dd_name): + return dd_name.upper().ljust(9, ' ') + + 
@staticmethod + def _build_parameter_string(parameter_list): + parameter_string = "" + if parameter_list: + parameter_string = ','.join(parameter_list) + return parameter_string + + @staticmethod + def _concatenate_key_value_pairs_into_list(dict_to_unpack): + """Puts key and value equal to one another + + Parameters + ---------- + dict_to_unpack : dict + A dictionary of key value pairs to put equal to one another inside a list. + If the value in the dictionary is another dictionary, it checks the dictionary length is a + multiple of 2 and puts 2 values equal to one another. If value is blank, key will be stored + on its own. + E.g. {"PARAM1": "ONE", "SINGLE": "" } -> ["PARAM1=1", "SINGLE"] + + Returns + ------- + list + A list of name=values, E.g. ["PARAM1=ONE", "DUMMY", "SINGLE"] + """ + list_of_pairs = [] + for k, v in dict_to_unpack.items(): + k = k.upper() + if v == "": + paired = k + else: + if k == GMTEXT: + v = JCLHelper._add_single_quotes_to_text(v) + paired = '{0}={1}'.format(k, v) + list_of_pairs.append(paired) + return list_of_pairs + + @staticmethod + def _add_single_quotes_to_text(value): + if re.match("^\"\'([^']|\'\')*\'\"$", value): + return value + value = value.strip('"').strip("'") + if "'" in value: + value = value.replace("'", "''") + + return "'{0}'".format(value) diff --git a/plugins/module_utils/_local_catalog.py b/plugins/module_utils/_local_catalog.py new file mode 100644 index 00000000..5105ddbe --- /dev/null +++ b/plugins/module_utils/_local_catalog.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +# FOR INTERNAL USE IN THE COLLECTION ONLY. 
+ +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + + +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.dd_statement import StdoutDefinition, DatasetDefinition, DDStatement +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.zos_mvs_raw import MVSCmd +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._data_set_utils import MVS_CMD_RETRY_ATTEMPTS +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._response import MVSExecutionException, _execution + + +def _get_ccmutl_dds(catalog): # type: (dict) -> list[DDStatement] + return [ + DDStatement('steplib', DatasetDefinition(catalog["sdfhload"])), + DDStatement('sysprint', StdoutDefinition()), + DDStatement('sysudump', StdoutDefinition()), + DDStatement( + 'dfhlcd', + DatasetDefinition( + dataset_name=catalog["name"], + disposition="SHR")), + ] + + +def _run_dfhccutl(starting_catalog): # type: (dict) -> list + executions = [] + + for x in range(MVS_CMD_RETRY_ATTEMPTS): + dfhccutl_response = _execute_dfhccutl(starting_catalog) + + executions.append(_execution( + name="DFHCCUTL - Initialise Local Catalog", + rc=dfhccutl_response.rc, + stdout=dfhccutl_response.stdout, + stderr=dfhccutl_response.stderr)) + + if dfhccutl_response.rc != 0: + raise MVSExecutionException( + "DFHCCUTL failed with RC {0}".format( + dfhccutl_response.rc + ), executions + ) + else: + break + + return executions + + +def _execute_dfhccutl(starting_catalog): + return MVSCmd.execute( + pgm="DFHCCUTL", + dds=_get_ccmutl_dds(catalog=starting_catalog), + verbose=True, + debug=False) + + +def _get_idcams_cmd_lcd(data_set): # type: (dict) -> dict + defaults = { + "CLUSTER": { + "RECORDSIZE": "{0} {1}".format(RECORD_COUNT_DEFAULT, RECORD_SIZE_DEFAULT), + "INDEXED": None, + "KEYS": "{0} {1}".format(KEY_LENGTH, KEY_OFFSET), + "FREESPACE": "{0} {1}".format(CI_PERCENT, CA_PERCENT), + "SHAREOPTIONS": str(SHARE_CROSSREGION), + "REUSE": None + }, + "DATA": { + 
"CONTROLINTERVALSIZE": str(CONTROL_INTERVAL_SIZE_DEFAULT) + }, + "INDEX": { + None + } + } + defaults.update(data_set) + return defaults + + +RECORD_COUNT_DEFAULT = 70 +RECORD_SIZE_DEFAULT = 2041 +CONTROL_INTERVAL_SIZE_DEFAULT = 2048 +KEY_LENGTH = 52 +KEY_OFFSET = 0 +CI_PERCENT = 10 +CA_PERCENT = 10 +SHARE_CROSSREGION = 2 diff --git a/plugins/module_utils/_local_request_queue.py b/plugins/module_utils/_local_request_queue.py new file mode 100644 index 00000000..789f1072 --- /dev/null +++ b/plugins/module_utils/_local_request_queue.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +# FOR INTERNAL USE IN THE COLLECTION ONLY. + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + + +def _get_idcams_cmd_lrq(data_set): # type: (dict) -> dict + defaults = { + "CLUSTER": { + "RECORDSIZE": "{0} {1}".format(RECORD_COUNT_DEFAULT, RECORD_SIZE_DEFAULT), + "INDEXED": None, + "KEYS": "{0} {1}".format(KEY_LENGTH, KEY_OFFSET), + "FREESPACE": "{0} {1}".format(CI_PERCENT, CA_PERCENT), + "SHAREOPTIONS": "{0} {1}".format(SHARE_CROSSREGION, SHARE_CROSSSYSTEM), + "REUSE": None, + "LOG": str(LOG_OPTION) + }, + "DATA": { + "CONTROLINTERVALSIZE": str(CONTROL_INTERVAL_SIZE_DEFAULT) + }, + "INDEX": { + None + } + } + defaults.update(data_set) + return defaults + + +RECORD_COUNT_DEFAULT = 2232 +RECORD_SIZE_DEFAULT = 2400 +CONTROL_INTERVAL_SIZE_DEFAULT = 2560 +KEY_LENGTH = 40 +KEY_OFFSET = 0 +CI_PERCENT = 0 +CA_PERCENT = 10 +SHARE_CROSSREGION = 2 +SHARE_CROSSSYSTEM = 3 +LOG_OPTION = "UNDO" diff --git a/plugins/module_utils/_response.py b/plugins/module_utils/_response.py new file mode 100644 index 00000000..dd76c043 --- /dev/null +++ b/plugins/module_utils/_response.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 
2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +# FOR INTERNAL USE IN THE COLLECTION ONLY. + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + + +def _execution(name, rc, stdout, stderr): # type: (str, int, str, str) -> dict + return { + "name": name, + "rc": rc, + "stdout": stdout, + "stderr": stderr, + } + + +class MVSExecutionException(Exception): + def __init__(self, message, executions): # type: (str, list[_execution]) -> None + self.message = message + self.executions = executions diff --git a/plugins/module_utils/_td_intrapartition.py b/plugins/module_utils/_td_intrapartition.py new file mode 100644 index 00000000..50a26c19 --- /dev/null +++ b/plugins/module_utils/_td_intrapartition.py @@ -0,0 +1,31 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +# FOR INTERNAL USE IN THE COLLECTION ONLY. + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + + +def _get_idcams_cmd_intra(data_set): # type: (dict) -> dict + defaults = { + "CLUSTER": { + "RECORDSIZE": "{0} {1}".format( + RECORD_COUNT_DEFAULT, + RECORD_SIZE_DEFAULT, + ), + "NONINDEXED": None, + "CONTROLINTERVALSIZE": str(CONTROL_INTERVAL_SIZE_DEFAULT), + }, + "DATA": {None}, + } + defaults.update(data_set) + return defaults + + +RECORD_COUNT_DEFAULT = 1529 +RECORD_SIZE_DEFAULT = 1529 +CONTROL_INTERVAL_SIZE_DEFAULT = 1536 diff --git a/plugins/module_utils/_transaction_dump.py b/plugins/module_utils/_transaction_dump.py new file mode 100644 index 00000000..74aa0fee --- /dev/null +++ b/plugins/module_utils/_transaction_dump.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +# FOR INTERNAL USE IN THE COLLECTION ONLY. 
+ +from __future__ import (absolute_import, division, print_function) + +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.dd_statement import DatasetDefinition +__metaclass__ = type + + +def _build_seq_data_set_definition_transaction_dump(data_set): # type: (dict) -> DatasetDefinition + definition = DatasetDefinition( + dataset_name=data_set["name"], + primary=data_set["primary"], + secondary=data_set["secondary"], + primary_unit=data_set["unit"], + secondary_unit=data_set["unit"], + volumes=data_set.get("volumes"), + block_size=BLOCK_SIZE_DEFAULT, + record_length=RECORD_LENGTH_DEFAULT, + record_format=RECORD_FORMAT, + disposition=DISPOSITION, + normal_disposition=NORMAL_DISP, + conditional_disposition=CONDITION_DISP, + type=TYPE + ) + return definition + + +BLOCK_SIZE_DEFAULT = 4096 +RECORD_LENGTH_DEFAULT = 4092 +RECORD_FORMAT = "VB" +TYPE = "SEQ" +DISPOSITION = "NEW" +NORMAL_DISP = "CATALOG" +CONDITION_DISP = "DELETE" diff --git a/plugins/module_utils/cmci.py b/plugins/module_utils/cmci.py index 4a5347c2..d0ac5d34 100644 --- a/plugins/module_utils/cmci.py +++ b/plugins/module_utils/cmci.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# (c) Copyright IBM Corp. 2020,2021 +# (c) Copyright IBM Corp. 2020,2024 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) from __future__ import (absolute_import, division, print_function) @@ -33,7 +33,7 @@ CMCI_HOST = 'cmci_host' CMCI_PORT = 'cmci_port' CMCI_USER = 'cmci_user' -CMCI_PASSWORD = 'cmci_password' # nosec B105 +CMCI_PASSWORD = 'cmci_password' CMCI_CERT = 'cmci_cert' CMCI_KEY = 'cmci_key' CONTEXT = 'context' diff --git a/plugins/modules/aux_temp_storage.py b/plugins/modules/aux_temp_storage.py new file mode 100644 index 00000000..df683fb1 --- /dev/null +++ b/plugins/modules/aux_temp_storage.py @@ -0,0 +1,223 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 
2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +DOCUMENTATION = r""" +--- +module: aux_temp_storage +short_description: Create and remove the CICS auxiliary temporary storage data set +description: + - Create and remove the L(auxiliary temporary storage,https://www.ibm.com/docs/en/cics-ts/latest?topic=sets-defining-auxiliary-temporary-storage-data-set) + data set used by a CICS® region. + - You can use this module when provisioning or de-provisioning a CICS region. + - Use the O(state) option to specify the intended state for the auxiliary + temporary storage data set. For example, use O(state=initial) to create an auxiliary temporary storage + data set if it doesn't exist. +author: Andrew Twydell (@andrewtwydell) +version_added: 2.1.0 +extends_documentation_fragment: + - ibm.ibm_zos_cics.aux_temp_storage +""" + + +EXAMPLES = r""" +- name: Initialize an auxiliary temporary storage data set by using the templated location + ibm.ibm_zos_cics.aux_temp_storage: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: "initial" + +- name: Initialize a user specified auxiliary temporary storage data set + ibm.ibm_zos_cics.aux_temp_storage: + region_data_sets: + dfhtemp: + dsn: "REGIONS.ABCD0001.DFHTEMP" + state: "initial" + +- name: Initialize a large auxiliary temporary storage data set by using the templated location + ibm.ibm_zos_cics.aux_temp_storage: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + space_primary: 50 + space_type: "M" + state: "initial" + +- name: Retain the existing state of an auxiliary temporary storage data set defined by the template + ibm.ibm_zos_cics.aux_temp_storage: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: "warm" + +- name: Retain the existing state of a user specified auxiliary temporary storage data set + 
ibm.ibm_zos_cics.aux_temp_storage: + region_data_sets: + dfhtemp: + dsn: "REGIONS.ABCD0001.DFHTEMP" + state: "warm" + +- name: Delete an existing auxiliary temporary storage data set defined by the template + ibm.ibm_zos_cics.aux_temp_storage: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: "absent" + +- name: Delete an existing user specified auxiliary temporary storage data set + ibm.ibm_zos_cics.aux_temp_storage: + region_data_sets: + dfhtemp: + dsn: "REGIONS.ABCD0001.DFHTEMP" + state: "absent" +""" + + +RETURN = r""" +changed: + description: True if the state was changed, otherwise False. + returned: always + type: bool +failed: + description: True if the Ansible task failed, otherwise False. + returned: always + type: bool +start_state: + description: + - The state of the auxiliary temporary storage data set before the Ansible task runs. + returned: always + type: dict + contains: + data_set_organization: + description: The organization of the data set at the start of the Ansible task. + returned: always + type: str + sample: "VSAM" + exists: + description: True if the specified auxiliary temporary storage data set exists. + type: bool + returned: always +end_state: + description: The state of the auxiliary temporary storage data set at the end of the Ansible task. + returned: always + type: dict + contains: + data_set_organization: + description: The organization of the data set at the end of the Ansible task. + returned: always + type: str + sample: "VSAM" + exists: + description: True if the specified auxiliary temporary storage data set exists. + type: bool + returned: always +executions: + description: A list of program executions performed during the Ansible task. + returned: always + type: list + elements: dict + contains: + name: + description: A human-readable name for the program execution. + type: str + returned: always + rc: + description: The return code for the program execution. 
+ type: int + returned: always + stdout: + description: The standard output stream returned from the program execution. + type: str + returned: always + stderr: + description: The standard error stream returned from the program execution. + type: str + returned: always +msg: + description: A string containing an error message if applicable + returned: always + type: str +""" + +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._data_set_utils import ( + _build_idcams_define_cmd +) +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._data_set import ( + RECORDS, + KILOBYTES, + MEGABYTES, + CYLINDERS, + TRACKS, + REGION_DATA_SETS, + SPACE_PRIMARY, + SPACE_SECONDARY, + SPACE_TYPE, + DataSet +) +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._aux_temp_storage import ( + _get_idcams_cmd_temp +) + + +DSN = "dfhtemp" +SPACE_PRIMARY_DEFAULT = 200 +SPACE_SECONDARY_DEFAULT = 10 +SPACE_OPTIONS = [KILOBYTES, MEGABYTES, RECORDS, CYLINDERS, TRACKS] + + +class AnsibleAuxiliaryTempModule(DataSet): + def __init__(self): + super(AnsibleAuxiliaryTempModule, self).__init__(SPACE_PRIMARY_DEFAULT, SPACE_SECONDARY_DEFAULT) + self.name = self.region_param[DSN]["dsn"].upper() + self.expected_data_set_organization = "VSAM" + + def _get_arg_spec(self): # type: () -> dict + arg_spec = super(AnsibleAuxiliaryTempModule, self)._get_arg_spec() + + arg_spec[SPACE_PRIMARY].update({ + "default": SPACE_PRIMARY_DEFAULT + }) + arg_spec[SPACE_SECONDARY].update({ + "default": SPACE_SECONDARY_DEFAULT + }) + arg_spec[SPACE_TYPE].update({ + "default": RECORDS, + "choices": SPACE_OPTIONS, + }) + arg_spec[REGION_DATA_SETS]["options"].update({ + DSN: { + "type": "dict", + "required": False, + "options": { + "dsn": { + "type": "str", + "required": False, + }, + }, + }, + }) + + return arg_spec + + def get_arg_defs(self): # type: () -> dict + defs = super().get_arg_defs() + defs[REGION_DATA_SETS]["options"][DSN]["options"]["dsn"].update({ + "arg_type": "data_set_base" + 
}) + defs[REGION_DATA_SETS]["options"][DSN]["options"]["dsn"].pop("type") + return defs + + def create_data_set(self): # type: () -> None + create_cmd = _build_idcams_define_cmd(_get_idcams_cmd_temp(self.get_data_set())) + super().build_vsam_data_set(create_cmd) + + +def main(): + AnsibleAuxiliaryTempModule().main() + + +if __name__ == "__main__": + main() diff --git a/plugins/modules/aux_trace.py b/plugins/modules/aux_trace.py new file mode 100644 index 00000000..3bc69be2 --- /dev/null +++ b/plugins/modules/aux_trace.py @@ -0,0 +1,290 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +DOCUMENTATION = r''' +--- +module: aux_trace +short_description: Allocate auxiliary trace data sets +description: + - Allocates the two L(auxiliary trace,https://www.ibm.com/docs/en/cics-ts/latest?topic=sets-setting-up-auxiliary-trace-data) + data sets used by a CICS® region. When CICS auxiliary trace is activated, trace entries produced by CICS are written to the auxiliary trace data sets. + These data sets can hold large amounts of trace data. + - The two data sets are referred to as auxiliary trace data set A (DFHAUXT) and auxiliary trace data set B (DFHBUXT). 
+author: Kye Maloy (@KyeMaloy97) +version_added: 2.1.0 +extends_documentation_fragment: + - ibm.ibm_zos_cics.aux_trace +''' + +EXAMPLES = r""" +- name: Allocate auxiliary trace data set A (implicit) by using the templated location + ibm.ibm_zos_cics.aux_trace: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: initial + +- name: Allocate a user specified data set as auxiliary trace data set A (implicit) + ibm.ibm_zos_cics.aux_trace: + region_data_sets: + dfhauxt: + dsn: "REGIONS.ABCD0001.DFHAUXT" + state: initial + +- name: Allocate auxiliary trace data set A by using the templated location + ibm.ibm_zos_cics.aux_trace: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: initial + destination: A + +- name: Allocate a user specified data set as auxiliary trace data set A + ibm.ibm_zos_cics.aux_trace: + region_data_sets: + dfhauxt: + dsn: "REGIONS.ABCD0001.DFHAUXT" + state: initial + destination: A + +- name: Allocate auxiliary trace data set B by using the templated location + ibm.ibm_zos_cics.aux_trace: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: initial + destination: B + +- name: Allocate a user specified data set as auxiliary trace data set B + ibm.ibm_zos_cics.aux_trace: + region_data_sets: + dfhbuxt: + dsn: "REGIONS.ABCD0001.DFHBUXT" + state: initial + destination: B + +- name: Retain the existing state of auxiliary trace data set A (implicit) defined by the template + ibm.ibm_zos_cics.aux_trace: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: "warm" + +- name: Retain the existing state of a user specified auxiliary trace data set A (implicit) + ibm.ibm_zos_cics.aux_trace: + region_data_sets: + dfhauxt: + dsn: "REGIONS.ABCD0001.DFHAUXT" + state: "warm" + +- name: Retain the existing state of auxiliary trace data set B defined by the template + ibm.ibm_zos_cics.aux_trace: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name 
>>" + state: "warm" + destination: B + +- name: Retain the existing state of a user specified auxiliary trace data set B + ibm.ibm_zos_cics.aux_trace: + region_data_sets: + dfhbuxt: + dsn: "REGIONS.ABCD0001.DFHBUXT" + state: "warm" + destination: B + +- name: Delete auxiliary trace data set A (implicit) defined by the template + ibm.ibm_zos_cics.aux_trace: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: absent + +- name: Delete a user specified auxiliary trace data set A (implicit) + ibm.ibm_zos_cics.aux_trace: + region_data_sets: + dfhauxt: + dsn: "REGIONS.ABCD0001.DFHAUXT" + state: absent + +- name: Delete auxiliary trace data set B defined by the template + ibm.ibm_zos_cics.aux_trace: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: absent + destination: B + +- name: Delete a user specified auxiliary trace data set B + ibm.ibm_zos_cics.aux_trace: + region_data_sets: + dfhbuxt: + dsn: "REGIONS.ABCD0001.DFHBUXT" + state: absent + destination: B +""" + + +RETURN = r""" +changed: + description: True if the state was changed, otherwise False. + returned: always + type: bool +failed: + description: True if the Ansible task failed, otherwise False. + returned: always + type: bool +start_state: + description: + - The state of the auxiliary trace data set before the Ansible task runs. + returned: always + type: dict + contains: + data_set_organization: + description: The organization of the data set at the start of the Ansible task. + returned: always + type: str + sample: "Sequential" + exists: + description: True if the specified auxiliary trace data set exists. + type: bool + returned: always +end_state: + description: The state of the auxiliary trace data set at the end of the Ansible task. + returned: always + type: dict + contains: + data_set_organization: + description: The organization of the data set at the end of the Ansible task.
+ returned: always + type: str + sample: "Sequential" + exists: + description: True if the specified auxiliary trace data set exists. + type: bool + returned: always +executions: + description: A list of program executions performed during the Ansible task. + returned: always + type: list + elements: dict + contains: + name: + description: A human-readable name for the program execution. + type: str + returned: always + rc: + description: The return code for the program execution. + type: int + returned: always + stdout: + description: The standard output stream returned from the program execution. + type: str + returned: always + stderr: + description: The standard error stream returned from the program execution. + type: str + returned: always +msg: + description: A string containing an error message if applicable + returned: always + type: str +""" + +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._data_set import ( + DESTINATION, + DESTINATION_OPTIONS, + DESTINATION_DEFAULT_VALUE, + MEGABYTES, + REGION_DATA_SETS, + SPACE_PRIMARY, + SPACE_SECONDARY, + SPACE_TYPE, + DataSet +) +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._aux_trace import ( + _build_seq_data_set_definition_aux_trace +) + + +DSN_A = "dfhauxt" +DSN_B = "dfhbuxt" +SPACE_PRIMARY_DEFAULT = 20 +SPACE_SECONDARY_DEFAULT = 4 + + +class AnsibleAuxiliaryTraceModule(DataSet): + def __init__(self): # type: () -> None + self.ds_destination = "" + super(AnsibleAuxiliaryTraceModule, self).__init__(SPACE_PRIMARY_DEFAULT, SPACE_SECONDARY_DEFAULT) + self.ds_destination = DSN_B if self.destination == "B" else DSN_A + self.name = self.region_param[self.ds_destination]["dsn"].upper() + self.expected_data_set_organization = "Sequential" + + def _get_arg_spec(self): # type: () -> dict + arg_spec = super(AnsibleAuxiliaryTraceModule, self)._get_arg_spec() + + arg_spec.update({ + DESTINATION: { + "type": "str", + "choices": DESTINATION_OPTIONS, + "default": DESTINATION_DEFAULT_VALUE + } + 
}) + + arg_spec[SPACE_PRIMARY].update({ + "default": SPACE_PRIMARY_DEFAULT + }) + arg_spec[SPACE_SECONDARY].update({ + "default": SPACE_SECONDARY_DEFAULT + }) + arg_spec[SPACE_TYPE].update({ + "default": MEGABYTES + }) + arg_spec[REGION_DATA_SETS]["options"].update({ + DSN_A: { + "type": "dict", + "required": False, + "options": { + "dsn": { + "type": "str", + "required": False, + }, + }, + }, + DSN_B: { + "type": "dict", + "required": False, + "options": { + "dsn": { + "type": "str", + "required": False, + }, + }, + }, + }) + + return arg_spec + + def get_arg_defs(self): # type: () -> dict + defs = super().get_arg_defs() + defs[REGION_DATA_SETS]["options"][DSN_A]["options"]["dsn"].update({ + "arg_type": "data_set_base" + }) + defs[REGION_DATA_SETS]["options"][DSN_B]["options"]["dsn"].update({ + "arg_type": "data_set_base" + }) + defs[REGION_DATA_SETS]["options"][DSN_A]["options"]["dsn"].pop("type") + defs[REGION_DATA_SETS]["options"][DSN_B]["options"]["dsn"].pop("type") + return defs + + def create_data_set(self): # type: () -> None + definition = _build_seq_data_set_definition_aux_trace(self.get_data_set()) + super().build_seq_data_set(self.ds_destination, definition) + + +def main(): + AnsibleAuxiliaryTraceModule().main() + + +if __name__ == '__main__': + main() diff --git a/plugins/modules/cmci_action.py b/plugins/modules/cmci_action.py index 72db2860..d2a66f29 100644 --- a/plugins/modules/cmci_action.py +++ b/plugins/modules/cmci_action.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# (c) Copyright IBM Corp. 2020,2021 +# (c) Copyright IBM Corp. 
2020,2023 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) from __future__ import absolute_import, division, print_function diff --git a/plugins/modules/cmci_create.py b/plugins/modules/cmci_create.py index 290c394c..c7f4112f 100644 --- a/plugins/modules/cmci_create.py +++ b/plugins/modules/cmci_create.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# (c) Copyright IBM Corp. 2020,2021 +# (c) Copyright IBM Corp. 2020,2023 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) from __future__ import absolute_import, division, print_function diff --git a/plugins/modules/cmci_delete.py b/plugins/modules/cmci_delete.py index d23c4d2f..3441d948 100644 --- a/plugins/modules/cmci_delete.py +++ b/plugins/modules/cmci_delete.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# (c) Copyright IBM Corp. 2020,2021 +# (c) Copyright IBM Corp. 2020,2023 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) from __future__ import absolute_import, division, print_function diff --git a/plugins/modules/cmci_get.py b/plugins/modules/cmci_get.py index 033b885b..4a37bb1a 100644 --- a/plugins/modules/cmci_get.py +++ b/plugins/modules/cmci_get.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# (c) Copyright IBM Corp. 2020,2021 +# (c) Copyright IBM Corp. 2020,2023 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) from __future__ import absolute_import, division, print_function diff --git a/plugins/modules/cmci_update.py b/plugins/modules/cmci_update.py index a52753a7..70e7622c 100644 --- a/plugins/modules/cmci_update.py +++ b/plugins/modules/cmci_update.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# (c) Copyright IBM Corp. 2020,2021 +# (c) Copyright IBM Corp. 
2020,2023 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) from __future__ import absolute_import, division, print_function diff --git a/plugins/modules/csd.py b/plugins/modules/csd.py new file mode 100644 index 00000000..6bf613bf --- /dev/null +++ b/plugins/modules/csd.py @@ -0,0 +1,429 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +DOCUMENTATION = r''' +--- +module: csd +short_description: Create, remove, and manage the CICS CSD +description: + - Create, remove, and manage the + L(CICS system definition data set,https://www.ibm.com/docs/en/cics-ts/latest?topic=configuring-setting-up-shared-data-sets-csd-sysin) (CSD) used by a CICS® + region. + - You can use this module when provisioning or de-provisioning a CICS region, or when managing + the state of the CSD during upgrades or restarts. + - Use the O(state) option to specify the intended state for the CSD. + For example, use O(state=initial) to create and initialize a CSD + if it doesn't exist, or empty an existing CSD of all records. 
+author: Thomas Latham (@Thomas-Latham3) +version_added: 2.1.0 +extends_documentation_fragment: + - ibm.ibm_zos_cics.csd +''' + + +EXAMPLES = r""" +- name: Initialize a CSD by using the templated location + ibm.ibm_zos_cics.csd: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + state: "initial" + +- name: Initialize a user specified CSD + ibm.ibm_zos_cics.csd: + region_data_sets: + dfhcsd: + dsn: "REGIONS.ABCD0001.DFHCSD" + cics_data_sets: + sdfhload: "CICSTS61.CICS.SDFHLOAD" + state: "initial" + +- name: Initialize a large CSD by using the templated location + ibm.ibm_zos_cics.csd: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + space_primary: 10 + space_type: "M" + state: "initial" + +- name: Delete a CSD defined by the template + ibm.ibm_zos_cics.csd: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + state: "absent" + +- name: Delete a user specified CSD + ibm.ibm_zos_cics.csd: + region_data_sets: + dfhcsd: + dsn: "REGIONS.ABCD0001.DFHCSD" + cics_data_sets: + sdfhload: "CICSTS61.CICS.SDFHLOAD" + state: "absent" + +- name: Retain the existing state of a CSD defined by the template + ibm.ibm_zos_cics.csd: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + state: "warm" + +- name: Retain the existing state of a user specified CSD + ibm.ibm_zos_cics.csd: + region_data_sets: + dfhcsd: + dsn: "REGIONS.ABCD0001.DFHCSD" + cics_data_sets: + sdfhload: "CICSTS61.CICS.SDFHLOAD" + state: "warm" + +- name: Run a DFHCSDUP script from a data set + ibm.ibm_zos_cics.csd: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + state: "changed" + input_location: "DATA_SET" + 
input_src: "TESTER.DEFS.SCRIPT" + +- name: Run a DFHCSDUP script from a USS file + ibm.ibm_zos_cics.csd: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + input_location: "USS" + input_src: "/u/tester/defs/script.csdup" + +- name: Run a DFHCSDUP script from a local file + ibm.ibm_zos_cics.csd: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + input_location: "LOCAL" + input_src: "/User/tester/defs/script.csdup" + +- name: Run a DFHCSDUP script inline + ibm.ibm_zos_cics.csd: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + input_location: "INLINE" + input_content: | + DEFINE PROGRAM(TESTPRG1) GROUP(TESTGRP1) + DEFINE PROGRAM(TESTPRG2) GROUP(TESTGRP2) +""" + + +RETURN = r""" +changed: + description: True if the state was changed, otherwise False. + returned: always + type: bool +failed: + description: True if the Ansible task failed, otherwise False. + returned: always + type: bool +start_state: + description: + - The state of the CSD before the Ansible task runs. + returned: always + type: dict + contains: + data_set_organization: + description: The organization of the data set at the start of the Ansible task. + returned: always + type: str + sample: "VSAM" + exists: + description: True if the CSD exists. + type: bool + returned: always +end_state: + description: The state of the CSD at the end of the Ansible task. + returned: always + type: dict + contains: + data_set_organization: + description: The organization of the data set at the end of the Ansible task. + returned: always + type: str + sample: "VSAM" + exists: + description: True if the CSD exists. + type: bool + returned: always +executions: + description: A list of program executions performed during the Ansible task. 
+ returned: always + type: list + elements: dict + contains: + name: + description: A human-readable name for the program execution. + type: str + returned: always + rc: + description: The return code for the program execution. + type: int + returned: always + stdout: + description: The standard output stream returned from the program execution. + type: str + returned: always + stderr: + description: The standard error stream returned from the program execution. + type: str + returned: always +msg: + description: A string containing an error message if applicable + returned: always + type: str +""" + + +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._response import MVSExecutionException +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.dd_statement import DatasetDefinition, StdinDefinition +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._data_set_utils import ( + _build_idcams_define_cmd +) +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._data_set import ( + CICS_DATA_SETS, + MEGABYTES, + KILOBYTES, + RECORDS, + CYLINDERS, + TRACKS, + REGION_DATA_SETS, + SPACE_PRIMARY, + SPACE_SECONDARY, + SPACE_TYPE, + STATE, + ABSENT, + INITIAL, + WARM, + DataSet +) +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._csd import ( + _get_csdup_initilize_cmd, + _get_idcams_cmd_csd, + _run_dfhcsdup +) + +DSN = "dfhcsd" +SPACE_PRIMARY_DEFAULT = 4 +SPACE_SECONDARY_DEFAULT = 1 +SPACE_OPTIONS = [KILOBYTES, MEGABYTES, RECORDS, CYLINDERS, TRACKS] +CHANGED = "changed" +STATE_OPTIONS = [ABSENT, INITIAL, WARM, CHANGED] +INPUT_SOURCE = "input_src" +INPUT_LOCATION = "input_location" +INPUT_CONTENT = "input_content" +DATA_SET = "DATA_SET" +USS = "USS" +LOCAL = "LOCAL" +INLINE = "INLINE" +INPUT_LOCATION_OPTIONS = [DATA_SET, USS, LOCAL, INLINE] +INPUT_LOCATION_DEFAULT = DATA_SET +LOG = "log" +LOG_OPTIONS = ["NONE", "UNDO", "ALL"] +LOGSTREAMID = "logstream_id" + + +class AnsibleCSDModule(DataSet): + def __init__(self): + 
self.input_src = "" + self.input_location = self.input_content = "" + super(AnsibleCSDModule, self).__init__(SPACE_PRIMARY_DEFAULT, SPACE_SECONDARY_DEFAULT) + self._validate_log_args() + self.name = self.region_param[DSN]["dsn"].upper() + self.expected_data_set_organization = "VSAM" + + def _get_arg_spec(self): # type: () -> dict + arg_spec = super(AnsibleCSDModule, self)._get_arg_spec() + + arg_spec[SPACE_PRIMARY].update({ + "default": SPACE_PRIMARY_DEFAULT + }) + arg_spec[SPACE_SECONDARY].update({ + "default": SPACE_SECONDARY_DEFAULT + }) + arg_spec[SPACE_TYPE].update({ + "default": MEGABYTES, + "choices": SPACE_OPTIONS, + }) + arg_spec[STATE].update({ + "choices": STATE_OPTIONS + }) + arg_spec[REGION_DATA_SETS]["options"].update({ + DSN: { + "type": "dict", + "required": False, + "options": { + "dsn": { + "type": "str", + "required": False, + }, + }, + }, + }) + arg_spec[CICS_DATA_SETS] = { + "type": "dict", + "required": True, + "options": { + "template": { + "type": "str", + "required": False, + }, + "sdfhload": { + "type": "str", + "required": False, + }, + }, + } + arg_spec.update({ + INPUT_SOURCE: { + "type": "str" + }, + INPUT_LOCATION: { + "type": "str", + "choices": INPUT_LOCATION_OPTIONS, + "default": DATA_SET + }, + INPUT_CONTENT: { + "type": "str" + }, + }) + arg_spec.update({ + LOG: { + "type": "str", + "choices": LOG_OPTIONS, + "required": False + }, + }) + arg_spec.update({ + LOGSTREAMID: { + "type": "str", + "required": False + }, + }) + return arg_spec + + def _validate_log_args(self): + if self._module.params.get(LOG, "") == "ALL" and self._module.params.get(LOGSTREAMID) is None: + self._fail("LOGSTREAMID must be provided when LOG is set to ALL.") + + def get_arg_defs(self): # type: () -> dict + defs = super().get_arg_defs() + defs[REGION_DATA_SETS]["options"][DSN]["options"]["dsn"].update({ + "arg_type": "data_set_base" + }) + defs[REGION_DATA_SETS]["options"][DSN]["options"]["dsn"].pop("type") + if self._module.params.get(INPUT_LOCATION) == DATA_SET: + defs[INPUT_SOURCE].update({ +
"arg_type": "data_set_base" + }) + defs[INPUT_SOURCE].pop("type") + return defs + + def assign_parameters(self, params): # type: (dict) -> None + super().assign_parameters(params) + if params.get(INPUT_SOURCE): + self.input_src = params[INPUT_SOURCE] + if params.get(INPUT_LOCATION): + self.input_location = params[INPUT_LOCATION] + if params.get(INPUT_CONTENT): + self.input_content = params[INPUT_CONTENT] + + def execute_target_state(self): # type: () -> None + if self.target_state == ABSENT: + self.delete_data_set() + elif self.target_state == INITIAL: + self.init_data_set() + elif self.target_state == WARM: + self.warm_with_records() + elif self.target_state == CHANGED: + self.csdup_script() + else: + self.invalid_target_state() + + def get_data_set(self): + data_set = super().get_data_set() + data_set.update({ + LOG: self._module.params.get(LOG), + LOGSTREAMID: self._module.params.get(LOGSTREAMID) + }) + return data_set + + def create_data_set(self): # type: () -> None + create_cmd = _build_idcams_define_cmd(_get_idcams_cmd_csd(self.get_data_set())) + super().build_vsam_data_set(create_cmd) + + def init_data_set(self): # type: () -> None + super().init_data_set() + try: + csdup_initialize_executions = _run_dfhcsdup(self.get_data_set(), _get_csdup_initilize_cmd()) + self.executions.extend(csdup_initialize_executions) + except MVSExecutionException as e: + self.executions.extend(e.executions) + self._fail(e.message) + + def csdup_script(self): + + if not self.input_location: + self._fail("input_location required") + + if self.input_location == INLINE: + if not self.input_content: + self._fail("input_content required when input_location={0}".format(self.input_location)) + else: + if not self.input_src: + self._fail("input_src required when input_location={0}".format(self.input_location)) + + try: + csdup_script_executions = [] + if self.input_location == DATA_SET: + csdup_script_executions.extend(_run_dfhcsdup(self.get_data_set(), DatasetDefinition(self.input_src))) 
+ elif self.input_location == USS: + with open(self.input_src) as script_file: + file_content = script_file.read() + csdup_script_executions.extend(_run_dfhcsdup(self.get_data_set(), StdinDefinition(content=file_content))) + elif self.input_location in [LOCAL, INLINE]: + csdup_script_executions.extend(_run_dfhcsdup(self.get_data_set(), StdinDefinition(content=self.input_content))) + else: + self._fail("input_location: {0} not recognised.".format(self.input_location)) + + self.executions.extend(csdup_script_executions) + + self.changed = True + except MVSExecutionException as e: + self.executions.extend(e.executions) + self._fail(e.message) + except (OSError, ValueError) as e: + # Handles the 'open' method failures + self.executions.extend(csdup_script_executions) + self._fail("{0} - {1}".format(type(e).__name__, str(e))) + + +def main(): + AnsibleCSDModule().main() + + +if __name__ == '__main__': + main() diff --git a/plugins/modules/global_catalog.py b/plugins/modules/global_catalog.py new file mode 100644 index 00000000..f58dc544 --- /dev/null +++ b/plugins/modules/global_catalog.py @@ -0,0 +1,447 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +DOCUMENTATION = r''' +--- +module: global_catalog +short_description: Create, remove, and manage the CICS global catalog +description: + - Create, remove, and manage the L(global catalog,https://www.ibm.com/docs/en/cics-ts/latest?topic=catalogs-global-catalog) + data set used by a CICS® region. The global catalog is used to store start type information, location of the CICS system log, + installed resource definitions, terminal control information and profiles. It contains information that CICS requires on a restart.
+ - You can use this module when provisioning or de-provisioning a CICS region, or when managing + the state of the global catalog during upgrades or restarts. + - Use the O(state) option to specify the intended state for the global catalog. For example, use O(state=initial) to create + and initialize a global catalog data set if it doesn't exist, or set the autostart override record of an existing + global catalog to C(AUTOINIT). In either case, a CICS region that is using this global catalog and set with the + C(START=AUTO) system initialization parameter performs an initial start. +author: Andrew Twydell (@AndrewTwydell) +version_added: 2.1.0 +seealso: + - module: local_catalog +extends_documentation_fragment: + - ibm.ibm_zos_cics.global_catalog +''' + + +EXAMPLES = r""" +- name: Initialize a global catalog by using the templated location + ibm.ibm_zos_cics.global_catalog: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + state: "initial" + +- name: Initialize a large global catalog by using the templated location + ibm.ibm_zos_cics.global_catalog: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + space_primary: 100 + space_type: "M" + state: "initial" + +- name: Initialize a large user specified global catalog + ibm.ibm_zos_cics.global_catalog: + region_data_sets: + dfhgcd: + dsn: "REGIONS.ABCD0001.DFHGCD" + cics_data_sets: + sdfhload: "CICSTS61.CICS.SDFHLOAD" + space_primary: 100 + space_type: "M" + state: "initial" + +- name: Set the autostart override record to AUTOASIS for a global catalog defined by the template + ibm.ibm_zos_cics.global_catalog: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + state: "warm" + +- name: Set the autostart override record to AUTOASIS for a user specified global catalog + 
ibm.ibm_zos_cics.global_catalog: + region_data_sets: + dfhgcd: + dsn: "REGIONS.ABCD0001.DFHGCD" + cics_data_sets: + sdfhload: "CICSTS61.CICS.SDFHLOAD" + state: "warm" + +- name: Set the autostart override record to AUTOCOLD for a global catalog defined by the template + ibm.ibm_zos_cics.global_catalog: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + state: "cold" + +- name: Set the autostart override record to AUTOCOLD for a user specified global catalog + ibm.ibm_zos_cics.global_catalog: + region_data_sets: + dfhgcd: + dsn: "REGIONS.ABCD0001.DFHGCD" + cics_data_sets: + sdfhload: "CICSTS61.CICS.SDFHLOAD" + state: "cold" + +- name: Delete a global catalog defined by the template + ibm.ibm_zos_cics.global_catalog: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + state: "absent" + +- name: Delete a user specified global catalog + ibm.ibm_zos_cics.global_catalog: + region_data_sets: + dfhgcd: + dsn: "REGIONS.ABCD0001.DFHGCD" + cics_data_sets: + sdfhload: "CICSTS61.CICS.SDFHLOAD" + state: "absent" +""" + + +RETURN = r""" +changed: + description: True if the state was changed, otherwise False. + returned: always + type: bool +failed: + description: True if the Ansible task failed, otherwise False. + returned: always + type: bool +start_state: + description: + - The state of the global catalog before the Ansible task runs. + returned: always + type: dict + contains: + autostart_override: + description: The current autostart override record. + returned: always + type: str + next_start: + description: The next start type listed in the global catalog. + returned: always + type: str + exists: + description: True if the specified global catalog data set exists. + type: bool + returned: always + data_set_organization: + description: The organization of the data set at the start of the Ansible task. 
+ returned: always + type: str + sample: "VSAM" +end_state: + description: The state of the global catalog at the end of the Ansible task. + returned: always + type: dict + contains: + autostart_override: + description: The current autostart override record. + returned: always + type: str + next_start: + description: The next start type listed in the global catalog + returned: always + type: str + exists: + description: True if the specified global catalog data set exists. + type: bool + returned: always + data_set_organization: + description: The organization of the data set at the end of the Ansible task. + returned: always + type: str + sample: "VSAM" +executions: + description: A list of program executions performed during the Ansible task. + returned: always + type: list + elements: dict + contains: + name: + description: A human-readable name for the program execution. + type: str + returned: always + rc: + description: The return code for the program execution. + type: int + returned: always + stdout: + description: The standard output stream returned from the program execution. + type: str + returned: always + stderr: + description: The standard error stream returned from the program execution. 
+ type: str + returned: always +msg: + description: A string containing an error message if applicable + returned: always + type: str +""" + +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._response import MVSExecutionException +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._data_set_utils import ( + _build_idcams_define_cmd +) +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._data_set import ( + CICS_DATA_SETS, + MEGABYTES, + KILOBYTES, + RECORDS, + CYLINDERS, + TRACKS, + REGION_DATA_SETS, + SPACE_PRIMARY, + SPACE_SECONDARY, + SPACE_TYPE, + STATE, + ABSENT, + INITIAL, + WARM, + DataSet +) +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._global_catalog import ( + _get_idcams_cmd_gcd, + _run_dfhrmutl +) + +COLD = "cold" +STATE_OPTIONS = [ABSENT, INITIAL, WARM, COLD] +SPACE_OPTIONS = [KILOBYTES, MEGABYTES, RECORDS, CYLINDERS, TRACKS] +DSN = "dfhgcd" +AUTO_START_WARM = "AUTOASIS" +AUTO_START_COLD = "AUTOCOLD" +AUTO_START_INIT = "AUTOINIT" +NEXT_START_EMERGENCY = "EMERGENCY" +NEXT_START_WARM = "WARM" +NEXT_START_COLD = "COLD" +NEXT_START_UNKNOWN = "UNKNOWN" +SPACE_PRIMARY_DEFAULT = 5 +SPACE_SECONDARY_DEFAULT = 1 + + +class AnsibleGlobalCatalogModule(DataSet): + def __init__(self): + self.autostart_override = "" + self.next_start = "" + super(AnsibleGlobalCatalogModule, self).__init__(SPACE_PRIMARY_DEFAULT, SPACE_SECONDARY_DEFAULT) + self.start_state = dict( + exists=False, + data_set_organization=self.data_set_organization, + autostart_override=self.autostart_override, + next_start=self.next_start + ) + self.end_state = dict( + exists=False, + data_set_organization=self.data_set_organization, + autostart_override=self.autostart_override, + next_start=self.next_start + ) + self.name = self.region_param[DSN]["dsn"].upper() + self.expected_data_set_organization = "VSAM" + + def get_data_set(self): # type: () -> dict + data_set = super().get_data_set() + data_set.update({ + "autostart_override": 
self.autostart_override, + "next_start": self.next_start, + }) + return data_set + + def set_start_state(self): # type: () -> None + self.start_state = dict( + exists=self.exists, + data_set_organization=self.data_set_organization, + autostart_override=self.autostart_override, + next_start=self.next_start + ) + + def set_end_state(self): # type: () -> None + self.end_state = dict( + exists=self.exists, + data_set_organization=self.data_set_organization, + autostart_override=self.autostart_override, + next_start=self.next_start + ) + + def _get_arg_spec(self): # type: () -> dict + arg_spec = super(AnsibleGlobalCatalogModule, self)._get_arg_spec() + + arg_spec[SPACE_PRIMARY].update({ + "default": SPACE_PRIMARY_DEFAULT + }) + arg_spec[SPACE_SECONDARY].update({ + "default": SPACE_SECONDARY_DEFAULT + }) + arg_spec[SPACE_TYPE].update({ + "default": MEGABYTES, + "choices": SPACE_OPTIONS, + }) + arg_spec[STATE].update({ + "choices": STATE_OPTIONS + }) + arg_spec[REGION_DATA_SETS]["options"].update({ + DSN: { + "type": "dict", + "required": False, + "options": { + "dsn": { + "type": "str", + "required": False, + }, + }, + }, + }) + arg_spec[CICS_DATA_SETS] = { + "type": "dict", + "required": True, + "options": { + "template": { + "type": "str", + "required": False, + }, + "sdfhload": { + "type": "str", + "required": False, + }, + }, + } + + return arg_spec + + def get_arg_defs(self): # type: () -> dict + defs = super().get_arg_defs() + defs[REGION_DATA_SETS]["options"][DSN]["options"]["dsn"].update({ + "arg_type": "data_set_base" + }) + defs[REGION_DATA_SETS]["options"][DSN]["options"]["dsn"].pop("type") + return defs + + def create_data_set(self): # type: () -> None + create_cmd = _build_idcams_define_cmd(_get_idcams_cmd_gcd(self.get_data_set())) + super().build_vsam_data_set(create_cmd) + + def init_data_set(self): # type: () -> None + if self.exists and self.autostart_override == AUTO_START_INIT: + self._exit() + + if not self.exists: + self.create_data_set() + + 
self.check_emergency() + try: + dfhrmutl_executions = _run_dfhrmutl( + self.name, + self.sdfhload, + cmd="SET_AUTO_START=AUTOINIT") + self.changed = True + self.executions.extend(dfhrmutl_executions) + except MVSExecutionException as e: + self.executions.extend(e.executions) + self._fail(e.message) + + def warm_data_set(self): # type: () -> None + super().warm_with_records() + + if self.autostart_override == AUTO_START_WARM: + self._exit() + + if ( + self.autostart_override == AUTO_START_INIT and + self.next_start == NEXT_START_UNKNOWN + ): + self._fail( + "Unused catalog. The catalog must be used by CICS before doing a warm start.") + try: + dfhrmutl_executions = _run_dfhrmutl( + self.name, + self.sdfhload, + cmd="SET_AUTO_START=AUTOASIS") + self.changed = True + self.executions.extend(dfhrmutl_executions) + except MVSExecutionException as e: + self.executions.extend(e.executions) + self._fail(e.message) + + def cold_data_set(self): # type: () -> None + if not self.exists: + self._fail("Data set {0} does not exist.".format(self.name)) + + self.check_emergency() + if self.autostart_override == AUTO_START_COLD: + self._exit() + + if ( + self.autostart_override == AUTO_START_INIT and + self.next_start == NEXT_START_UNKNOWN + ): + self._fail( + "Unused catalog. 
The catalog must be used by CICS before doing a cold start.") + try: + dfhrmutl_executions = _run_dfhrmutl( + self.name, + self.sdfhload, + cmd="SET_AUTO_START=AUTOCOLD") + self.changed = True + self.executions.extend(dfhrmutl_executions) + except MVSExecutionException as e: + self.executions.extend(e.executions) + self._fail(e.message) + + def execute_target_state(self): # type: () -> None + if self.target_state == ABSENT: + self.delete_data_set() + elif self.target_state == INITIAL: + self.init_data_set() + elif self.target_state == WARM: + self.warm_data_set() + elif self.target_state == COLD: + self.cold_data_set() + else: + self.invalid_target_state() + + def update_data_set_state(self): # type: () -> None + super().update_data_set_state() + + if self.exists and (self.data_set_organization == self.expected_data_set_organization): + try: + dfhrmutl_executions, (self.autostart_override, self.next_start) = _run_dfhrmutl( + self.name, self.sdfhload) + + self.executions.extend(dfhrmutl_executions) + except MVSExecutionException as e: + self.executions.extend(e.executions) + self._fail(e.message) + else: + self.autostart_override = "" + self.next_start = "" + + def check_emergency(self): # type: () -> None + if self.next_start and self.next_start.upper() == NEXT_START_EMERGENCY: + self._fail( + "Next start type is {0}. Potential data loss prevented." + .format(NEXT_START_EMERGENCY)) + + +def main(): + AnsibleGlobalCatalogModule().main() + + +if __name__ == '__main__': + main() diff --git a/plugins/modules/local_catalog.py b/plugins/modules/local_catalog.py new file mode 100644 index 00000000..671bf6b7 --- /dev/null +++ b/plugins/modules/local_catalog.py @@ -0,0 +1,279 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 
2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +DOCUMENTATION = r''' +--- +module: local_catalog +short_description: Create, remove, and manage the CICS local catalog +description: + - Create, remove, and manage the L(local catalog,https://www.ibm.com/docs/en/cics-ts/latest?topic=catalogs-local-catalog) + data set used by a CICS® region. CICS domains use the local catalog to save some of their information between CICS runs and + to preserve this information across a cold start. + - You can use this module when provisioning or de-provisioning a CICS region, or when managing + the state of the local catalog during upgrades or restarts. + - Use the O(state) option to specify the intended state for the local catalog. + For example, use O(state=initial) to create and initialize a local catalog data set if it doesn't exist, + or empty an existing local catalog of all records. 
+author: Enam Khan (@enam-khan) +version_added: 2.1.0 +seealso: + - module: global_catalog +extends_documentation_fragment: + - ibm.ibm_zos_cics.local_catalog +''' + + +EXAMPLES = r""" +- name: Initialize a local catalog data set by using the templated location + ibm.ibm_zos_cics.local_catalog: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + state: "initial" + +- name: Initialize a user specified local catalog data set + ibm.ibm_zos_cics.local_catalog: + region_data_sets: + dfhlcd: + dsn: "REGIONS.ABCD0001.DFHLCD" + cics_data_sets: + sdfhload: "CICSTS61.CICS.SDFHLOAD" + state: "initial" + +- name: Initialize a large catalog data set by using the templated location + ibm.ibm_zos_cics.local_catalog: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + space_primary: 500 + space_type: "REC" + state: "initial" + +- name: Retain the existing local catalog defined by the template + ibm.ibm_zos_cics.local_catalog: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + state: "warm" + +- name: Retain a user specified local catalog in its current state + ibm.ibm_zos_cics.local_catalog: + region_data_sets: + dfhlcd: + dsn: "REGIONS.ABCD0001.DFHLCD" + cics_data_sets: + sdfhload: "CICSTS61.CICS.SDFHLOAD" + state: "warm" + +- name: Delete a local catalog data set defined by the template + ibm.ibm_zos_cics.local_catalog: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + cics_data_sets: + template: "CICSTS61.CICS.<< lib_name >>" + state: "absent" + +- name: Delete a user specified local catalog data set + ibm.ibm_zos_cics.local_catalog: + region_data_sets: + dfhlcd: + dsn: "REGIONS.ABCD0001.DFHLCD" + cics_data_sets: + sdfhload: "CICSTS61.CICS.SDFHLOAD" + state: "absent" +""" + + +RETURN = r""" +changed: + 
description: True if the state was changed, otherwise False. + returned: always + type: bool +failed: + description: True if the Ansible task failed, otherwise False. + returned: always + type: bool +start_state: + description: + - The state of the local catalog data set before the Ansible task runs. + returned: always + type: dict + contains: + data_set_organization: + description: The organization of the data set at the start of the Ansible task. + returned: always + type: str + sample: "VSAM" + exists: + description: True if the specified local catalog data set exists. + type: bool + returned: always +end_state: + description: The state of the local catalog data set at the end of the Ansible task. + returned: always + type: dict + contains: + data_set_organization: + description: The organization of the data set at the end of the Ansible task. + returned: always + type: str + sample: "VSAM" + exists: + description: True if the specified local catalog data set exists. + type: bool + returned: always +executions: + description: A list of program executions performed during the Ansible task. + returned: always + type: list + elements: dict + contains: + name: + description: A human-readable name for the program execution. + type: str + returned: always + rc: + description: The return code for the program execution. + type: int + returned: always + stdout: + description: The standard output stream returned from the program execution. + type: str + returned: always + stderr: + description: The standard error stream returned from the program execution. 
+ type: str + returned: always +msg: + description: A string containing an error message if applicable + returned: always + type: str +""" + +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._response import MVSExecutionException +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._data_set_utils import ( + _build_idcams_define_cmd +) +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._data_set import ( + CICS_DATA_SETS, + MEGABYTES, + KILOBYTES, + RECORDS, + CYLINDERS, + TRACKS, + REGION_DATA_SETS, + SPACE_PRIMARY, + SPACE_SECONDARY, + SPACE_TYPE, + ABSENT, + INITIAL, + WARM, + DataSet +) +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._local_catalog import ( + _get_idcams_cmd_lcd, + _run_dfhccutl +) + + +DSN = "dfhlcd" +SPACE_PRIMARY_DEFAULT = 200 +SPACE_SECONDARY_DEFAULT = 5 +SPACE_OPTIONS = [KILOBYTES, MEGABYTES, RECORDS, CYLINDERS, TRACKS] + + +class AnsibleLocalCatalogModule(DataSet): + def __init__(self): + super(AnsibleLocalCatalogModule, self).__init__(SPACE_PRIMARY_DEFAULT, SPACE_SECONDARY_DEFAULT) + self.name = self.region_param[DSN]["dsn"].upper() + self.expected_data_set_organization = "VSAM" + + def _get_arg_spec(self): # type: () -> dict + arg_spec = super(AnsibleLocalCatalogModule, self)._get_arg_spec() + + arg_spec[SPACE_PRIMARY].update({ + "default": SPACE_PRIMARY_DEFAULT + }) + arg_spec[SPACE_SECONDARY].update({ + "default": SPACE_SECONDARY_DEFAULT + }) + arg_spec[SPACE_TYPE].update({ + "default": RECORDS, + "choices": SPACE_OPTIONS, + }) + arg_spec[REGION_DATA_SETS]["options"].update({ + DSN: { + "type": "dict", + "required": False, + "options": { + "dsn": { + "type": "str", + "required": False, + }, + }, + }, + }) + arg_spec[CICS_DATA_SETS] = { + "type": "dict", + "required": True, + "options": { + "template": { + "type": "str", + "required": False, + }, + "sdfhload": { + "type": "str", + "required": False, + }, + }, + } + + return arg_spec + + def get_arg_defs(self): # type: () -> dict + 
defs = super().get_arg_defs() + defs[REGION_DATA_SETS]["options"][DSN]["options"]["dsn"].update({ + "arg_type": "data_set_base" + }) + defs[REGION_DATA_SETS]["options"][DSN]["options"]["dsn"].pop("type") + return defs + + def execute_target_state(self): # type: () -> None + if self.target_state == ABSENT: + self.delete_data_set() + elif self.target_state == INITIAL: + self.init_data_set() + elif self.target_state == WARM: + self.warm_with_records() + else: + self.invalid_target_state() + + def create_data_set(self): # type: () -> None + create_cmd = _build_idcams_define_cmd(_get_idcams_cmd_lcd(self.get_data_set())) + super().build_vsam_data_set(create_cmd) + + def init_data_set(self): # type: () -> None + super().init_data_set() + try: + ccutl_executions = _run_dfhccutl(self.get_data_set()) + self.executions.extend(ccutl_executions) + except MVSExecutionException as e: + self.executions.extend(e.executions) + self._fail(e.message) + + +def main(): + AnsibleLocalCatalogModule().main() + + +if __name__ == '__main__': + main() diff --git a/plugins/modules/local_request_queue.py b/plugins/modules/local_request_queue.py new file mode 100644 index 00000000..b06606d0 --- /dev/null +++ b/plugins/modules/local_request_queue.py @@ -0,0 +1,222 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +DOCUMENTATION = r''' +--- +module: local_request_queue +short_description: Create and remove the CICS local request queue +description: + - Create and remove the L(local request queue,https://www.ibm.com/docs/en/cics-ts/latest?topic=sets-local-request-queue-data-set) + data set used by a CICS® region. The local request queue data set stores pending BTS requests. It ensures that, if CICS fails, no pending requests are lost. 
+ - You can use this module when provisioning or de-provisioning a CICS region. + - Use the O(state) option to specify the intended state for the local request queue. + For example, use O(state=initial) to create a local request queue data set if it doesn't yet exist, + or empty an existing local request queue of all records. +author: Drew Hughes (@andrewhughes101) +version_added: 2.1.0 +extends_documentation_fragment: + - ibm.ibm_zos_cics.local_request_queue +''' + + +EXAMPLES = r""" +- name: Initialize a local request queue data set by using the templated location + ibm.ibm_zos_cics.local_request_queue: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: "initial" + +- name: Initialize a user specified local request queue data set + ibm.ibm_zos_cics.local_request_queue: + region_data_sets: + dfhlrq: + dsn: "REGIONS.ABCD0001.DFHLRQ" + state: "initial" + +- name: Initialize a large request queue data set by using the templated location + ibm.ibm_zos_cics.local_request_queue: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + space_primary: 50 + space_type: "M" + state: "initial" + +- name: Retain the existing state of a local request queue data set defined by the template + ibm.ibm_zos_cics.local_request_queue: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: "warm" + +- name: Retain the existing state of a user specified local request queue data set + ibm.ibm_zos_cics.local_request_queue: + region_data_sets: + dfhlrq: + dsn: "REGIONS.ABCD0001.DFHLRQ" + state: "warm" + +- name: Delete a local request queue data set defined by the template + ibm.ibm_zos_cics.local_request_queue: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: "absent" + +- name: Delete a user specified local request queue data set + ibm.ibm_zos_cics.local_request_queue: + region_data_sets: + dfhlrq: + dsn: "REGIONS.ABCD0001.DFHLRQ" + state: "absent" +""" + + +RETURN = r""" +changed: + 
description: True if the state was changed, otherwise False. + returned: always + type: bool +failed: + description: True if the Ansible task failed, otherwise False. + returned: always + type: bool +start_state: + description: + - The state of the local request queue data set before the Ansible task runs. + returned: always + type: dict + contains: + data_set_organization: + description: The organization of the data set at the start of the Ansible task. + returned: always + type: str + sample: "VSAM" + exists: + description: True if the specified local request queue data set exists. + type: bool + returned: always +end_state: + description: The state of the local request queue data set at the end of the Ansible task. + returned: always + type: dict + contains: + data_set_organization: + description: The organization of the data set at the end of the Ansible task. + returned: always + type: str + sample: "VSAM" + exists: + description: True if the specified local request queue data set exists. + type: bool + returned: always +executions: + description: A list of program executions performed during the Ansible task. + returned: always + type: list + elements: dict + contains: + name: + description: A human-readable name for the program execution. + type: str + returned: always + rc: + description: The return code for the program execution. + type: int + returned: always + stdout: + description: The standard output stream returned from the program execution. + type: str + returned: always + stderr: + description: The standard error stream returned from the program execution. 
+ type: str + returned: always +msg: + description: A string containing an error message if applicable + returned: always + type: str +""" + +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._data_set_utils import ( + _build_idcams_define_cmd +) +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._data_set import ( + MEGABYTES, + KILOBYTES, + RECORDS, + CYLINDERS, + TRACKS, + REGION_DATA_SETS, + SPACE_PRIMARY, + SPACE_SECONDARY, + SPACE_TYPE, + DataSet +) +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._local_request_queue import ( + _get_idcams_cmd_lrq +) + + +DSN = "dfhlrq" +SPACE_PRIMARY_DEFAULT = 4 +SPACE_SECONDARY_DEFAULT = 1 +SPACE_OPTIONS = [KILOBYTES, MEGABYTES, RECORDS, CYLINDERS, TRACKS] + + +class AnsibleLocalRequestQueueModule(DataSet): + def __init__(self): + super(AnsibleLocalRequestQueueModule, self).__init__(SPACE_PRIMARY_DEFAULT, SPACE_SECONDARY_DEFAULT) + self.name = self.region_param[DSN]["dsn"].upper() + self.expected_data_set_organization = "VSAM" + + def _get_arg_spec(self): # type: () -> dict + arg_spec = super(AnsibleLocalRequestQueueModule, self)._get_arg_spec() + + arg_spec[SPACE_PRIMARY].update({ + "default": SPACE_PRIMARY_DEFAULT + }) + arg_spec[SPACE_SECONDARY].update({ + "default": SPACE_SECONDARY_DEFAULT + }) + arg_spec[SPACE_TYPE].update({ + "default": MEGABYTES, + "choices": SPACE_OPTIONS, + }) + arg_spec[REGION_DATA_SETS]["options"].update({ + DSN: { + "type": "dict", + "required": False, + "options": { + "dsn": { + "type": "str", + "required": False, + }, + }, + }, + }) + + return arg_spec + + def get_arg_defs(self): # type: () -> dict + defs = super().get_arg_defs() + defs[REGION_DATA_SETS]["options"][DSN]["options"]["dsn"].update({ + "arg_type": "data_set_base" + }) + defs[REGION_DATA_SETS]["options"][DSN]["options"]["dsn"].pop("type") + return defs + + def create_data_set(self): # type: () -> None + create_cmd = _build_idcams_define_cmd(_get_idcams_cmd_lrq(self.get_data_set())) + 
super().build_vsam_data_set(create_cmd) + + +def main(): + AnsibleLocalRequestQueueModule().main() + + +if __name__ == '__main__': + main() diff --git a/plugins/modules/region_jcl.py b/plugins/modules/region_jcl.py new file mode 100644 index 00000000..078de8d6 --- /dev/null +++ b/plugins/modules/region_jcl.py @@ -0,0 +1,2321 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +DOCUMENTATION = r""" +--- +module: region_jcl +short_description: Create CICS startup JCL data set +description: + - Create a data set containing the JCL to start a CICS® region. + - The JCL is generated by your input of CICS system data sets and system initialization parameters for CICS startup using the C(DFHSIP) program. +author: Kiera Bennett (@KieraBennett) +version_added: 2.1.0 +seealso: + - module: stop_cics +extends_documentation_fragment: + - ibm.ibm_zos_cics.region_jcl.documentation +""" + +EXAMPLES = r""" +- name: Create CICS startup JCL data set + ibm.ibm_zos_cics.region_jcl: + applid: ABC9ABC1 + cics_data_sets: + template: 'CICSTS61.CICS.<< lib_name >>' + le_data_sets: + template: 'LANG.ENVIORNMENT.<< lib_name >>' + region_data_sets: + template: 'REGIONS.ABC9ABC1.<< data_set_name >>' + sit_parameters: + start: COLD + sit: 6$ + aicons: AUTO + auxtr: 'ON' + auxtrsw: ALL + cicssvc: 217 + csdrecov: BACKOUTONLY + edsalim: 500M + grplist: (DFHLIST,DFHTERML) + gmtext: 'ABC9ABC1. 
CICS Region' + icvr: 20000 + isc: 'YES' + ircstrt: 'YES' + mxt: 500 + pgaipgm: ACTIVE + sec: 'YES' + spool: 'YES' + srbsvc: 218 + tcpip: 'NO' + usshome: /usshome/directory + wlmhealth: "OFF" + wrkarea: 2048 + sysidnt: ZPY1 + +- name: Create CICS startup JCL data set with more customization + ibm.ibm_zos_cics.region_jcl: + applid: ABC9ABC1 + job_parameters: + class: A + cics_data_sets: + template: 'CICSTS61.CICS.<< lib_name >>' + sdfhauth: 'CICSTS61.OVERRDE.TEMPLT.SDFHAUTH' + le_data_sets: + template: 'LANG.ENVIORNMENT.<< lib_name >>' + region_data_sets: + template: 'REGIONS.ABC9ABC1.<< data_set_name >>' + output_data_sets: + default_sysout_class: B + ceemsg: + sysout: A + sysprint: + omit: True + steplib: + top_data_sets: + - TOP.DATA_SET.ONE + - TOP.DATA_SET.TWO + data_sets: + - BOTTOM.DATA_SET.ONE + sit_parameters: + start: COLD + sit: 6$ + aicons: AUTO + auxtr: 'ON' + auxtrsw: ALL + cicssvc: 217 + csdrecov: BACKOUTONLY + edsalim: 500M + grplist: (DFHLIST,DFHTERML) + gmtext: 'ABC9ABC1. CICS Region' + icvr: 20000 + isc: 'YES' + ircstrt: 'YES' + mxt: 500 + pgaipgm: ACTIVE + stntrxx: + ab: ALL + skrxxxx: + PA21: 'COMMAND' + sec: 'YES' + spool: 'YES' + srbsvc: 218 + tcpip: 'NO' + usshome: /usshome/directory + wlmhealth: "OFF" + wrkarea: 2048 + sysidnt: ZPY1 +""" + +RETURN = r""" + changed: + description: True if the CICS startup JCL data set was created, otherwise False. + returned: always + type: bool + failed: + description: True if the Ansible task failed, otherwise False. + returned: always + type: bool + start_state: + description: + - The state of the CICS startup JCL data set before the Ansible task runs. + returned: always + type: dict + contains: + data_set_organization: + description: The organization of the data set at the start of the Ansible task. + returned: always + type: str + sample: "Sequential" + exists: + description: True if the CICS startup JCL data set exists. 
+ type: bool + returned: always + end_state: + description: The state of the CICS startup JCL data set at the end of the Ansible task. + returned: always + type: dict + contains: + data_set_organization: + description: The organization of the data set at the end of the Ansible task. + returned: always + type: str + sample: "Sequential" + exists: + description: True if the CICS startup JCL data set exists. + type: bool + returned: always + jcl: + description: The CICS startup JCL that is built during module execution. + returned: always + type: list + executions: + description: A list of program executions performed during the Ansible task. + returned: always + type: list + elements: dict + contains: + name: + description: A human-readable name for the program execution. + type: str + returned: always + rc: + description: The return code for the program execution. + type: int + returned: always + stdout: + description: The standard out stream returned by the program execution. + type: str + returned: always + stderr: + description: The standard error stream returned from the program execution. + type: str + returned: always + msg: + description: A string containing an error message if applicable. 
+ returned: always + type: str +""" + +import string +import math +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import is_member +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._data_set import ( + MEGABYTES, + REGION_DATA_SETS, + CICS_DATA_SETS, + SPACE_TYPE, + ABSENT, + INITIAL, + WARM, + DataSet +) +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._data_set_utils import _read_data_set_content, _write_jcl_to_data_set +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.dd_statement import DatasetDefinition +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._jcl_helper import ( + JCLHelper, DLM, DD_INSTREAM, CONTENT, END_INSTREAM, JOB_CARD, EXECS, JOB_NAME, DDS, NAME +) +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._response import MVSExecutionException +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._data_set_utils import _run_listds + + +DFHSTART = "dfhstart" +SPACE_PRIMARY_DEFAULT = 5 +SPACE_SECONDARY_DEFAULT = 3 + + +region_data_sets_list = ['dfhauxt', 'dfhbuxt', 'dfhcsd', 'dfhgcd', 'dfhintra', + 'dfhlcd', 'dfhlrq', 'dfhtemp', 'dfhdmpa', 'dfhdmpb'] +APPLID = 'applid' +CEEMSG = 'ceemsg' +CEEOUT = 'ceeout' +CPSM_DATA_SETS = 'cpsm_data_sets' +DD_DATA = 'DD DATA' +DD_NAME = 'dd_name' +DEFAULT_SYSOUT_CLASS = 'default_sysout_class' +DFHCXRF = 'dfhcxrf' +DFHRPL = 'dfhrpl' +DFHSIP = 'DFHSIP' +DISP = 'disp' +DSN = 'dsn' +JOB_PARAMETERS = 'job_parameters' +LE_DATA_SETS = 'le_data_sets' +LOGUSR = 'logusr' +DATA_SETS = 'data_sets' +MSGUSR = 'msgusr' +OMIT = 'omit' +OUTPUT_DATA_SETS = 'output_data_sets' +PARTITIONED = 'Partitioned' +PGM = 'pgm' +SEQUENTIAL = 'Sequential' +SIT_PARAMETERS = 'sit_parameters' +SHR = 'SHR' +STEPLIB = 'steplib' +SYSABEND = 'sysabend' +SYSIN = 'sysin' +SYSOUT = 'sysout' +SYSPRINT = 'sysprint' +SYSUDUMP = 'sysudump' +TEMPLATE = 'template' +TOP_DATA_SETS = 'top_data_sets' + + +class AnsibleRegionJCLModule(DataSet): + def 
__init__(self): + self.jcl = "" + super(AnsibleRegionJCLModule, self).__init__(1, 1) + self.name = self.region_param[DFHSTART][DSN].upper() + self.base_data_set_name = "" + self.base_exists = False + self.base_data_set_organization = "" + self.dds = [] + self.jcl_helper = JCLHelper() + self.primary_unit = "" + self.secondary_unit = "" + + def check_member(self): + ds_name_param = self._module.params[REGION_DATA_SETS][DFHSTART][DSN] + return is_member(ds_name_param) + + def get_expected_ds_org(self): + if self.member: + return PARTITIONED + else: + return SEQUENTIAL + + def get_result(self): # type: () -> dict + result = super().get_result() + result.update({ + "jcl": self.jcl + }) + return result + + def _get_arg_spec(self): # type: () -> dict + arg_spec = super(AnsibleRegionJCLModule, self)._get_arg_spec() + arg_spec[SPACE_TYPE].update({ + "default": MEGABYTES + }) + # Add all the unique arguments for the module + arg_spec.update(self.init_argument_spec()) + return arg_spec + + def get_arg_defs(self): # type: () -> dict + defs = super().get_arg_defs() + defs.update(self.init_argument_spec()) + self.member = self.check_member() + self.expected_data_set_organization = self.get_expected_ds_org() + if self.member: + self.update_arg_def(defs[REGION_DATA_SETS]["options"][DFHSTART]["options"][DSN], "data_set_member") + else: + region_data_sets_list.append("dfhstart") + self.batch_update_arg_defs_for_ds(defs, REGION_DATA_SETS, region_data_sets_list, True) + self.batch_update_arg_defs_for_ds(defs, CICS_DATA_SETS, ["sdfhauth", "sdfhlic", "sdfhload"]) + self.batch_update_arg_defs_for_ds(defs, LE_DATA_SETS, ["sceecics", "sceerun", "sceerun2"]) + self.batch_update_arg_defs_for_ds(defs, CPSM_DATA_SETS, ["seyuload", "seyuauth"]) + defs[STEPLIB]["options"][TOP_DATA_SETS].update({"elements": "data_set_base"}) + defs[STEPLIB]["options"][DATA_SETS].update({"elements": "data_set_base"}) + defs[DFHRPL]["options"][TOP_DATA_SETS].update({"elements": "data_set_base"}) + 
defs[DFHRPL]["options"][DATA_SETS].update({"elements": "data_set_base"}) + self.update_arg_def(defs[APPLID], "qualifier") + if defs.get(JOB_PARAMETERS) and defs[JOB_PARAMETERS]["options"].get(JOB_NAME): + # If they've provided a job_name we need to validate this too + self.update_arg_def(defs[JOB_PARAMETERS]["options"][JOB_NAME], "qualifier") + # Popping sit parameters as these don't need validation and it will complain at arbitrary keys. + defs.pop(SIT_PARAMETERS) + return defs + + def batch_update_arg_defs_for_ds(self, defs, key, list_of_args_to_update, dsn=False): + for arg in list_of_args_to_update: + if dsn: + self.update_arg_def(defs[key]["options"][arg]["options"][DSN]) + else: + self.update_arg_def(defs[key]["options"][arg]) + + def update_arg_def(self, dict_to_update, arg_type="data_set_base"): + dict_to_update.update({"arg_type": arg_type}) + dict_to_update.pop("type") + + def calculate_size_parameters(self): + # Default primary and secondary units to the space_type module arg + self.primary_unit = self.unit + self.secondary_unit = self.unit + + min_size_bytes = int(math.ceil(len(self.jcl.encode()) / 1024)) + primary_size_kilobytes = int(math.ceil(min_size_bytes * 1.1)) + secondary_size_kilobytes = int(math.ceil(primary_size_kilobytes * 0.1)) + + # Check if a user has passed space arguments + if not self._module.params.get("space_primary"): + self.primary = primary_size_kilobytes + self.primary_unit = "K" + + if not self._module.params.get("space_secondary"): + self.secondary = secondary_size_kilobytes + self.secondary_unit = "K" + + def create_data_set(self): # type: () -> None + self.calculate_size_parameters() + if self.member: + if not self.base_exists: + self._fail("Base data set {0} does not exist. Can only create a member in an existing PDS/E".format(self.base_data_set_name)) + if self.base_data_set_organization != PARTITIONED: + self._fail("Base data set {0} is not a PDS/E. 
Member cannot be created in base data set".format(self.base_data_set_name)) + else: + data_set_def = DatasetDefinition( + dataset_name=self.name, + primary=self.primary, + secondary=self.secondary, + primary_unit=self.primary_unit, + secondary_unit=self.secondary_unit, + volumes=self.volumes, + block_size=4096, + record_length=80, + record_format="FB", + disposition="NEW", + normal_disposition="CATALOG", + conditional_disposition="DELETE", + type="SEQ" + ) + super().build_seq_data_set(DFHSTART, data_set_def) + + self.write_jcl() + + def generate_jcl(self): + self._build_data_structure_of_arguments() + self.jcl_helper.render_jcl() + self.jcl = "\n".join(self.jcl_helper.jcl) + + def write_jcl(self): + try: + jcl_writer_execution = _write_jcl_to_data_set(self.jcl, self.name) + self.executions.extend(jcl_writer_execution) + self.changed = True + except MVSExecutionException as e: + self.executions.extend(e.executions) + super()._fail(e.message) + + def init_data_set(self): + self.generate_jcl() + if self.exists: + super().delete_data_set() + self.update_data_set_state() + self.create_data_set() + else: + self.create_data_set() + + def warm_target_state(self): + if (self.exists and not self.member) or (self.exists and self.base_exists and self.member): + self.generate_jcl() + try: + jcl_writer_execution, jcl_data = _read_data_set_content(self.name) + self.executions.extend(jcl_writer_execution) + gen_jcl = set(self.jcl.split()) + existing_jcl = set(jcl_data.split()) + + jcl_diff = gen_jcl.symmetric_difference(existing_jcl) + if len(jcl_diff) != 0: + super()._fail("Data set {0} does not contain the expected Region JCL.".format(self.name)) + except MVSExecutionException as e: + self.executions.extend(e.executions) + super()._fail(e.message) + else: + super()._fail("Data set {0} does not exist.".format(self.name)) + + def execute_target_state(self): # type: () -> None + if self.target_state == ABSENT: + super().delete_data_set() + elif self.target_state == INITIAL: + 
self.init_data_set() + elif self.target_state == WARM: + self.warm_target_state() + else: + super().invalid_target_state() + + def update_data_set_state(self): # type: () -> None + try: + if self.member: + self.base_data_set_name = self.name.split("(")[0] + + listds_executions, self.base_exists, self.base_data_set_organization = _run_listds(self.base_data_set_name) + self.executions.extend(listds_executions) + + listds_executions, self.exists, self.data_set_organization = _run_listds(self.name) + self.executions.extend(listds_executions) + except MVSExecutionException as e: + self.executions.extend(e.executions) + self._fail(e.message) + + def _build_data_structure_of_arguments(self): + self._remove_none_values_from_dict(self._module.params) + self._populate_job_card_dict() + self._populate_exec_dict() + + def _populate_job_card_dict(self): + job_name = self._module.params[APPLID] + self.jcl_helper.job_data[JOB_CARD] = self._module.params.get(JOB_PARAMETERS, {JOB_NAME: job_name}) + if self.jcl_helper.job_data[JOB_CARD].get(JOB_NAME) is None: + self.jcl_helper.job_data[JOB_CARD].update({JOB_NAME: job_name}) + + def _populate_exec_dict(self): + exec_data = {NAME: "", + PGM: DFHSIP, + DDS: self._populate_dds()} + exec_data = self._add_exec_parameters(exec_data) + + def _populate_dds(self): + self._copy_libraries_to_steplib_and_dfhrpl() + self._add_block_of_libraries(STEPLIB) + self._add_block_of_libraries(DFHRPL) + self._add_per_region_data_sets() + self._add_output_data_sets() + self._add_sit_parameters() + return self.dds + + def _copy_libraries_to_steplib_and_dfhrpl(self): + steplib_args = {CICS_DATA_SETS: ["sdfhauth", "sdfhlic"], CPSM_DATA_SETS: ["seyuauth"], LE_DATA_SETS: ["sceerun", "sceerun2"]} + dfhrpl_args = {CICS_DATA_SETS: ["sdfhload"], CPSM_DATA_SETS: ["seyuload"], LE_DATA_SETS: ["sceecics", "sceerun", "sceerun2"]} + self._copy_libraries(steplib_args, STEPLIB) + self._copy_libraries(dfhrpl_args, DFHRPL) + + def _copy_libraries(self, libraries_to_copy, 
target_arg): + for lib_type, list_of_libs in libraries_to_copy.items(): + for lib in list_of_libs: + if self._module.params.get(lib_type) and self._module.params[lib_type].get(lib): + self._module.params[target_arg][TOP_DATA_SETS].append(self._module.params[lib_type][lib].upper()) + + def _add_exec_parameters(self, exec_data): + if self._check_parameter_is_provided(SIT_PARAMETERS): + # We will need PARM=SI if they've provided SIT parameters, we add this for them. + exec_data.update({"PARM": "SI"}) + self.jcl_helper.job_data[EXECS].append(exec_data) + return exec_data + + def _add_block_of_libraries(self, lib_name): + if self._check_parameter_is_provided(lib_name): + libraries = self._concat_libraries(lib_name) + list_of_lib_dicts = self._add_libraries(libraries) + if list_of_lib_dicts: + self.dds.append({lib_name: list_of_lib_dicts}) + + def _get_delimiter(self, content): + # If they've used the instream delimiter in their instream data + if AnsibleRegionJCLModule._check_for_existing_dlm_within_content(content): + dlm = self._find_unused_character(content) + if dlm is None: + super()._fail( + "Cannot replace instream delimiter as all character instances have been used.") + # Return a new delimiter so that they dont accidentally terminate their instream early. + return dlm + # They've not used a dlm in their instream data so we don't have to replace it. 
+ return None + + @staticmethod + def _find_unused_character(content): + all_chars = '@$#' + string.ascii_uppercase + string.digits + char_combinations_present = set() + preferred_dlms = ['@', '$', '#'] + + for line in content: + first_two_chars_in_line = line[:2] + char_combinations_present.add(first_two_chars_in_line) + combination = AnsibleRegionJCLModule._get_unused_combination_of_chars( + char_combinations_present, preferred_dlms) + if combination: + return combination + else: + return AnsibleRegionJCLModule._get_unused_combination_of_chars(char_combinations_present, + all_chars) + + @staticmethod + def _get_unused_combination_of_chars(combinations, all_chars): + for char1 in all_chars: + for char2 in all_chars: + combination = char1 + char2 + if combination not in combinations: + return combination + return None + + @staticmethod + def _check_for_existing_dlm_within_content(content): + for current_item in content: + if END_INSTREAM in current_item: + return True + return False + + def _validate_content(self, content): + for current_item in content: + if DD_INSTREAM in current_item.upper(): + super()._fail("Invalid content for an in-stream: {0}".format(DD_INSTREAM)) + if DD_DATA in current_item.upper(): + super()._fail("Invalid content for an in-stream: {0}".format(DD_DATA)) + + def _add_output_data_sets(self): + output_data_sets = [CEEMSG, CEEOUT, MSGUSR, SYSPRINT, SYSUDUMP, SYSABEND, SYSOUT, + DFHCXRF, LOGUSR] + + user_provided_data_sets = self._module.params.get(OUTPUT_DATA_SETS, {}) + default_class = user_provided_data_sets.pop(DEFAULT_SYSOUT_CLASS, '*') + + for data_set in output_data_sets: + self._set_sysout_class_for_data_set( + data_set, default_class, user_provided_data_sets) + self._remove_omitted_data_set(data_set, user_provided_data_sets) + + for data_set_name, parameters in user_provided_data_sets.items(): + self.dds.append({data_set_name: [parameters]}) + + @staticmethod + def _remove_omitted_data_set(data_set, user_provided_data_sets): + if 
user_provided_data_sets.get(data_set) and user_provided_data_sets[data_set].get( + OMIT) is True: + user_provided_data_sets.pop(data_set) + + @staticmethod + def _set_sysout_class_for_data_set(data_set, default_class, user_provided_data_sets): + if user_provided_data_sets.get(data_set): + if user_provided_data_sets.get(data_set).get(SYSOUT) is None: + user_provided_data_sets[data_set][SYSOUT] = default_class.upper() + else: + user_provided_data_sets[data_set] = {SYSOUT: default_class.upper()} + + def _add_per_region_data_sets(self): + data_set_dict = self._module.params.get(REGION_DATA_SETS) + + for dd_name, parameters in data_set_dict.items(): + if dd_name != "dfhstart": + parameters[DSN] = parameters[DSN].upper() + parameters[DISP] = SHR + self.dds.append({dd_name: [parameters]}) + + def _add_libraries(self, data_sets): + dsn_dict = [] + for data_set in data_sets: + if data_set: + dsn_dict.append({DSN: data_set.upper(), DISP: SHR}) + return dsn_dict + + def _add_sit_parameters(self): + if self._check_parameter_is_provided(SIT_PARAMETERS): + self._module.params[SIT_PARAMETERS][APPLID] = self._module.params[APPLID] + sit_parms = self._manage_dictionaries_in_sit_parameters( + self._module.params[SIT_PARAMETERS]) + list_of_strings = JCLHelper._concatenate_key_value_pairs_into_list( + sit_parms) + self._validate_content(list_of_strings) + wrapped_content = AnsibleRegionJCLModule._wrap_sit_parameters(list_of_strings) + dlm = self._get_delimiter(wrapped_content) + if dlm: + self.dds.append( + {SYSIN: {DLM: dlm, CONTENT: wrapped_content}}) + else: + self.dds.append({SYSIN: {CONTENT: wrapped_content}}) + + def _manage_dictionaries_in_sit_parameters(self, dictionary): + key_values_to_add = {} + keys_to_remove = [] + for k, v in dictionary.items(): + if isinstance(v, dict): + new_key = k.rstrip('x') + for inner_k, inner_v in v.items(): + self._validate_dictionary_value_within_sit_parms( + k, inner_k) + key_values_to_add[new_key + inner_k] = inner_v + 
keys_to_remove.append(k) + for k, v in key_values_to_add.items(): + dictionary[k] = v + for key in keys_to_remove: + dictionary.pop(key) + return dictionary + + def _validate_dictionary_value_within_sit_parms(self, sit_param_key_with_trailing_x, chars_to_replace_trailing_x): + number_of_x_chars = len(sit_param_key_with_trailing_x) - len(sit_param_key_with_trailing_x.rstrip('x')) + + if sit_param_key_with_trailing_x.upper() == "SKRXXXX": + if len(chars_to_replace_trailing_x) != 3 and len(chars_to_replace_trailing_x) != 4: + super()._fail("Invalid key: {0}. Key must be a length of 3 or 4.".format(chars_to_replace_trailing_x)) + elif len(chars_to_replace_trailing_x) != number_of_x_chars: + super()._fail("Invalid key: {0}. Key must be the same length as the x's within {1}.".format( + chars_to_replace_trailing_x, sit_param_key_with_trailing_x)) + + def _remove_none_values_from_dict(self, dictionary): + for k, v in list(dictionary.items()): + if v is None: + del dictionary[k] + elif isinstance(v, dict): + self._remove_none_values_from_dict(v) + + def _check_parameter_is_provided(self, parameter_name): + if self._module.params.get(parameter_name) is None or self._module.params.get(parameter_name) is {}: + return False + return True + + def _concat_libraries(self, ds_name): + data_sets = [] + data_set_types = [TOP_DATA_SETS, DATA_SETS] + for data_set_name in data_set_types: + if self._module.params.get(ds_name).get(data_set_name): + data_sets.extend(self._module.params[ds_name][data_set_name]) + return data_sets + + @staticmethod + def _wrap_sit_parameters(content): + wrapped_content = [] + # These sit parameters are the only ones which can be wrapped. 
+ wrappable_sit_parameters = ["CRLPROFILE", "USSHOME", "GMTEXT", "USSCONFIG", "HTTPSERVERHDR", + "HTTPUSRAGENTHDR", "INFOCENTER", "JVMPROFILEDIR"] + for line in content: + wrapped = False + for sit_parm in wrappable_sit_parameters: + extracted_sit_parameter_from_line = AnsibleRegionJCLModule._find_sit_parm_key(line) + if extracted_sit_parameter_from_line == sit_parm: + if len(line) > 80: + # If the lines too long, break after character 80 and put 80 character chunks into the list. + wrapped_content.extend([line[i:i + 80] for i in range(0, len(line), 80)]) + wrapped = True + break + if not wrapped: + wrapped_content.append(line) + return wrapped_content + + @staticmethod + def _find_sit_parm_key(input_string): + index = input_string.find('=') + if index != -1: + return input_string[:index].strip() + else: + return None + + @staticmethod + def init_argument_spec(): # type: () -> dict + return { + JOB_PARAMETERS: { + 'type': 'dict', + 'required': False, + 'options': { + 'accounting_information': { + 'type': 'dict', + 'required': False, + 'options': { + 'pano': { + 'type': 'str', + 'required': False, + }, + 'room': { + 'type': 'str', + 'required': False, + }, + 'time': { + 'type': 'int', + 'required': False, + }, + 'lines': { + 'type': 'int', + 'required': False, + }, + 'cards': { + 'type': 'int', + 'required': False, + }, + 'forms': { + 'type': 'str', + 'required': False, + }, + 'copies': { + 'type': 'int', + 'required': False, + }, + 'log': { + 'type': 'str', + 'required': False, + }, + 'linect': { + 'type': 'int', + 'required': False, + } + } + }, + 'class': { + 'type': 'str', + 'required': False + }, + 'job_name': { + 'type': 'str', + 'required': False + }, + 'memlimit': { + 'type': 'str', + 'required': False + }, + 'msgclass': { + 'type': 'str', + 'required': False + }, + 'msglevel': { + 'type': 'dict', + 'required': False, + 'options': { + 'statements': { + 'type': 'int', + 'required': False, + 'choices': [0, 1, 2] + }, + 'messages': { + 'type': 'int', + 
'required': False, + 'choices': [0, 1] + } + } + }, + 'programmer_name': { + 'type': 'str', + 'required': False, + }, + 'region': { + 'type': 'str', + 'required': False + }, + 'user': { + 'type': 'str', + 'required': False + }, + } + }, + APPLID: { + 'type': 'str', + 'required': True, + }, + CICS_DATA_SETS: { + 'type': 'dict', + 'required': True, + 'options': { + 'template': { + 'type': 'str', + 'required': False + }, + 'sdfhload': { + 'type': 'str', + 'required': False + }, + 'sdfhauth': { + 'type': 'str', + 'required': False + }, + 'sdfhlic': { + 'type': 'str', + 'required': False + } + } + }, + LE_DATA_SETS: { + 'type': 'dict', + 'required': True, + 'options': { + TEMPLATE: { + 'type': 'str', + 'required': False + }, + 'sceecics': { + 'type': 'str', + 'required': False + }, + 'sceerun': { + 'type': 'str', + 'required': False + }, + 'sceerun2': { + 'type': 'str', + 'required': False + } + } + }, + CPSM_DATA_SETS: { + 'type': 'dict', + 'required': False, + 'options': { + TEMPLATE: { + 'type': 'str', + 'required': False + }, + 'seyuauth': { + 'type': 'str', + 'required': False + }, + 'seyuload': { + 'type': 'str', + 'required': False + } + } + }, + STEPLIB: { + 'type': 'dict', + 'required': False, + 'options': { + TOP_DATA_SETS: { + 'type': 'list', + 'required': False, + 'elements': 'str' + }, + DATA_SETS: { + 'type': 'list', + 'required': False, + 'elements': 'str' + }, + } + }, + DFHRPL: { + 'type': 'dict', + 'required': False, + 'options': { + TOP_DATA_SETS: { + 'type': 'list', + 'required': False, + 'elements': 'str' + }, + DATA_SETS: { + 'type': 'list', + 'required': False, + 'elements': 'str' + } + } + }, + REGION_DATA_SETS: { + 'type': 'dict', + 'required': True, + 'options': { + TEMPLATE: { + 'type': 'str', + 'required': False + }, + 'dfhcsd': { + 'type': 'dict', + 'required': False, + 'options': { + DSN: { + 'type': 'str', + 'required': False + }, + } + }, + 'dfhlrq': { + 'type': 'dict', + 'required': False, + 'options': { + DSN: { + 'type': 'str', + 
'required': False + }, + } + }, + 'dfhdmpa': { + 'type': 'dict', + 'required': False, + 'options': { + DSN: { + 'type': 'str', + 'required': False + }, + } + }, + 'dfhdmpb': { + 'type': 'dict', + 'required': False, + 'options': { + DSN: { + 'type': 'str', + 'required': False + }, + } + }, + 'dfhauxt': { + 'type': 'dict', + 'required': False, + 'options': { + DSN: { + 'type': 'str', + 'required': False + }, + } + }, + 'dfhbuxt': { + 'type': 'dict', + 'required': False, + 'options': { + DSN: { + 'type': 'str', + 'required': False + }, + } + }, + 'dfhlcd': { + 'type': 'dict', + 'required': False, + 'options': { + DSN: { + 'type': 'str', + 'required': False + }, + } + }, + 'dfhgcd': { + 'type': 'dict', + 'required': False, + 'options': { + DSN: { + 'type': 'str', + 'required': False + }, + } + }, + 'dfhintra': { + 'type': 'dict', + 'required': False, + 'options': { + DSN: { + 'type': 'str', + 'required': False + }, + } + }, + 'dfhtemp': { + 'type': 'dict', + 'required': False, + 'options': { + DSN: { + 'type': 'str', + 'required': False + }, + } + }, + 'dfhstart': { + 'type': 'dict', + 'required': False, + 'options': { + DSN: { + 'type': 'str', + 'required': False + } + } + } + } + }, + OUTPUT_DATA_SETS: { + 'type': 'dict', + 'required': False, + 'options': { + DEFAULT_SYSOUT_CLASS: { + 'type': 'str', + 'required': False + }, + CEEMSG: { + 'type': 'dict', + 'required': False, + 'options': { + 'sysout': { + 'type': 'str', + 'required': False + }, + OMIT: { + 'type': 'bool', + 'required': False + }, + } + }, + CEEOUT: { + 'type': 'dict', + 'required': False, + 'options': { + 'sysout': { + 'type': 'str', + 'required': False + }, + OMIT: { + 'type': 'bool', + 'required': False + }, + } + }, + MSGUSR: { + 'type': 'dict', + 'required': False, + 'options': { + 'sysout': { + 'type': 'str', + 'required': False + }, + OMIT: { + 'type': 'bool', + 'required': False + }, + } + }, + SYSPRINT: { + 'type': 'dict', + 'required': False, + 'options': { + 'sysout': { + 'type': 'str', + 
'required': False + }, + OMIT: { + 'type': 'bool', + 'required': False + }, + } + }, + SYSUDUMP: { + 'type': 'dict', + 'required': False, + 'options': { + 'sysout': { + 'type': 'str', + 'required': False + }, + OMIT: { + 'type': 'bool', + 'required': False + }, + } + }, + SYSABEND: { + 'type': 'dict', + 'required': False, + 'options': { + 'sysout': { + 'type': 'str', + 'required': False + }, + OMIT: { + 'type': 'bool', + 'required': False + }, + } + }, + SYSOUT: { + 'type': 'dict', + 'required': False, + 'options': { + 'sysout': { + 'type': 'str', + 'required': False + }, + OMIT: { + 'type': 'bool', + 'required': False + }, + } + }, + DFHCXRF: { + 'type': 'dict', + 'required': False, + 'options': { + 'sysout': { + 'type': 'str', + 'required': False + }, + OMIT: { + 'type': 'bool', + 'required': False + }, + } + }, + LOGUSR: { + 'type': 'dict', + 'required': False, + 'options': { + 'sysout': { + 'type': 'str', + 'required': False + }, + OMIT: { + 'type': 'bool', + 'required': False + } + } + } + } + }, + SIT_PARAMETERS: { + 'type': 'dict', + 'required': False, + 'options': { + 'adi': { + 'type': 'int', + 'required': False, + }, + 'aibridge': { + 'type': 'str', + 'required': False, + 'choices': ['AUTO', 'YES'] + }, + 'aicons': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'AUTO', 'YES'] + }, + 'aiexit': { + 'type': 'str', + 'required': False, + }, + 'aildelay': { + 'type': 'int', + 'required': False, + }, + 'aiqmax': { + 'type': 'int', + 'required': False, + }, + 'airdelay': { + 'type': 'int', + 'required': False, + }, + 'akpfreq': { + 'type': 'int', + 'required': False, + }, + 'autconn': { + 'type': 'int', + 'required': False, + }, + 'autodst': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'autoresettime': { + 'type': 'str', + 'required': False, + 'choices': ['IMMEDIATE', 'NO', 'YES'], + }, + 'auxtr': { + 'type': 'str', + 'required': False, + 'choices': ['OFF', 'ON'], + }, + 'auxtrsw': { + 'type': 'str', + 'required': 
False, + 'choices': ['NO', 'ALL', 'NEXT'], + }, + 'bms': { + 'type': 'str', + 'required': False, + }, + 'brmaxkeeptime': { + 'type': 'int', + 'required': False, + }, + 'cdsasze': { + 'type': 'int', + 'required': False, + }, + 'certexpirywarn': { + 'type': 'str', + 'required': False, + }, + 'chkstrm': { + 'type': 'str', + 'required': False, + 'choices': ['CURRENT', 'NONE'], + }, + 'chkstsk': { + 'type': 'str', + 'required': False, + 'choices': ['CURRENT', 'NONE'], + }, + 'cicssvc': { + 'type': 'int', + 'required': False, + }, + 'cilock': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'clintcp': { + 'type': 'str', + 'required': False, + }, + 'clsdstp': { + 'type': 'str', + 'required': False, + 'choices': ['NOTIFY', 'NONOTIFY'], + }, + 'clt': { + 'type': 'str', + 'required': False, + }, + 'cmdprot': { + 'type': 'str', + 'required': False, + 'choices': ['YES', 'NO'], + }, + 'cmdsec': { + 'type': 'str', + 'required': False, + 'choices': ['ASIS', 'ALWAYS'], + }, + 'confdata': { + 'type': 'str', + 'required': False, + 'choices': ['SHOW', 'HIDE'], + }, + 'conftxt': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'cpsmconn': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'CMAS', 'LMAS', 'WUI', 'SMSSJ'], + }, + 'crlprofile': { + 'type': 'str', + 'required': False, + }, + 'csdacc': { + 'type': 'str', + 'required': False, + 'choices': ['READWRITE', 'READONLY'], + }, + 'csdbkup': { + 'type': 'str', + 'required': False, + 'choices': ['STATIC', 'DYNAMIC'], + }, + 'csdbufnd': { + 'type': 'int', + 'required': False, + }, + 'csdbufni': { + 'type': 'int', + 'required': False, + }, + 'csddisp': { + 'type': 'str', + 'required': False, + 'choices': ['OLD', 'SHR'], + }, + 'csddsn': { + 'type': 'str', + 'required': False, + }, + 'csdfrlog': { + 'type': 'int', + 'required': False, + }, + 'csdinteg': { + 'type': 'str', + 'required': False, + 'choices': ['UNCOMMITTED', 'CONSISTENT', 'REPEATABLE'], + }, + 'csdjid': { + 
'type': 'str', + 'required': False, + }, + 'csdlsrno': { + 'type': 'str', + 'required': False, + }, + 'csdrecov': { + 'type': 'str', + 'required': False, + 'choices': ['NONE', 'ALL', 'BACKOUTONLY'], + }, + 'csdrls': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'csdstrno': { + 'type': 'int', + 'required': False, + }, + 'cwakey': { + 'type': 'str', + 'required': False, + 'choices': ['USER', 'CICS'], + }, + 'dae': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'datform': { + 'type': 'str', + 'required': False, + 'choices': ['MMDDYY', 'DDMMYY', 'YYMMDD'], + }, + 'db2conn': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'dbctlcon': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'debugtool': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'dfltuser': { + 'type': 'str', + 'required': False, + }, + 'dip': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'dismacp': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'doccodepage': { + 'type': 'str', + 'required': False, + }, + 'dsalim': { + 'type': 'str', + 'required': False, + }, + 'dshipidl': { + 'type': 'int', + 'required': False, + }, + 'dshipint': { + 'type': 'int', + 'required': False, + }, + 'dsrtpgm': { + 'type': 'str', + 'required': False, + }, + 'dtrpgm': { + 'type': 'str', + 'required': False, + }, + 'dtrtran': { + 'type': 'str', + 'required': False, + }, + 'dump': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES', 'TABLEONLY'], + }, + 'dumpds': { + 'type': 'str', + 'required': False, + 'choices': ['AUTO', 'A', 'B'], + }, + 'dumpsw': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'NEXT', 'ALL'], + }, + 'duretry': { + 'type': 'int', + 'required': False, + }, + 'ecdsasze': { + 'type': 'str', + 'required': False, + }, + 'edsalim': { + 'type': 'str', + 'required': False, + }, + 'eodi': { + 'type': 
'str', + 'required': False, + }, + 'epcdsasze': { + 'type': 'str', + 'required': False, + }, + 'epudsasze': { + 'type': 'str', + 'required': False, + }, + 'erdsasze': { + 'type': 'str', + 'required': False, + }, + 'esdsasze': { + 'type': 'str', + 'required': False, + }, + 'esmexits': { + 'type': 'str', + 'required': False, + 'choices': ['NOINSTLN', 'INSTLN'], + }, + 'eudsasze': { + 'type': 'str', + 'required': False, + }, + 'fct': { + 'type': 'str', + 'required': False, + }, + 'fcqronly': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'fepi': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'fldsep': { + 'type': 'str', + 'required': False, + }, + 'fldstrt': { + 'type': 'str', + 'required': False, + }, + 'forceqr': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'fsstaff': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'ftimeout': { + 'type': 'int', + 'required': False, + }, + 'gmtext': { + 'type': 'str', + 'required': False, + }, + 'gmtran': { + 'type': 'str', + 'required': False, + }, + 'gntran': { + 'type': 'str', + 'required': False, + }, + 'grname': { + 'type': 'str', + 'required': False, + }, + 'grplist': { + 'type': 'str', + 'required': False, + }, + 'gtftr': { + 'type': 'str', + 'required': False, + 'choices': ['OFF', 'ON'], + }, + 'hpo': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'httpserverhdr': { + 'type': 'str', + 'required': False, + }, + 'httpusragenthdr': { + 'type': 'str', + 'required': False, + }, + 'icp': { + 'type': 'str', + 'choices': ['COLD'], + 'required': False, + }, + 'icv': { + 'type': 'int', + 'required': False, + }, + 'icvr': { + 'type': 'int', + 'required': False, + }, + 'icvtsd': { + 'type': 'int', + 'required': False, + }, + 'infocenter': { + 'type': 'str', + 'required': False, + }, + 'initparm': { + 'type': 'str', + 'required': False, + }, + 'intrdrjobuser': + { + 'type': 'str', + 'required': 
False, + 'choices': ['TASK', 'REGION'], + }, + 'inttr': { + 'type': 'str', + 'required': False, + 'choices': ['ON', 'OFF'], + }, + 'ircstrt': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'isc': { + 'type': 'str', + 'required': False, + 'choices': ['YES', 'NO'], + }, + 'jesdi': { + 'type': 'int', + 'required': False, + }, + 'jvmprofiledir': { + 'type': 'str', + 'required': False, + }, + 'kerberosuser': { + 'type': 'str', + 'required': False, + }, + 'keyring': { + 'type': 'str', + 'required': False, + 'no_log': False, + }, + 'lgdfint': { + 'type': 'int', + 'required': False, + }, + 'lgnmsg': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'llacopy': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES', 'NEWCOPY'], + }, + 'localccsid': { + 'type': 'int', + 'required': False, + }, + 'lpa': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'maxopentcbs': { + 'type': 'int', + 'required': False, + }, + 'maxsockets': { + 'type': 'int', + 'required': False, + }, + 'maxssltcbs': { + 'type': 'int', + 'required': False, + }, + 'maxtlslevel': { + 'type': 'str', + 'required': False, + 'choices': ['TLS11', 'TLS12', 'TLS13'], + }, + 'maxxptcbs': { + 'type': 'int', + 'required': False, + }, + 'mct': { + 'type': 'str', + 'required': False, + }, + 'mintlslevel': { + 'type': 'str', + 'required': False, + 'choices': ['TLS11', 'TLS12', 'TLS13'], + }, + 'mn': { + 'type': 'str', + 'required': False, + 'choices': ['OFF', 'ON'], + }, + 'mnconv': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'mnexc': { + 'type': 'str', + 'required': False, + 'choices': ['OFF', 'ON'], + }, + 'mnfreq': { + 'type': 'int', + 'required': False, + }, + 'mnidn': { + 'type': 'str', + 'required': False, + 'choices': ['OFF', 'ON'], + }, + 'mnper': { + 'type': 'str', + 'required': False, + 'choices': ['OFF', 'ON'], + }, + 'mnres': { + 'type': 'str', + 'required': False, + 'choices': ['OFF', 'ON'], 
+ }, + 'mnsync': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'mntime': { + 'type': 'str', + 'required': False, + 'choices': ['GMT', 'LOCAL'], + }, + 'mqconn': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'mrobtch': { + 'type': 'int', + 'required': False, + }, + 'mrofse': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'mrolrm': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'msgcase': { + 'type': 'str', + 'required': False, + 'choices': ['MIXED', 'UPPER'], + }, + 'msglvl': { + 'type': 'int', + 'required': False, + 'choices': [1, 0], + }, + 'mxt': { + 'type': 'int', + 'required': False, + }, + 'natlang': { + 'type': 'str', + 'required': False, + 'choices': ['E', 'C', 'K'], + }, + 'ncpldft': { + 'type': 'str', + 'required': False, + }, + 'newsit': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'nistsp800131a': { + 'type': 'str', + 'required': False, + 'choices': ['NOCHECK', 'CHECK'], + }, + 'nonrlsrecov': { + 'type': 'str', + 'required': False, + 'choices': ['VSAMCAT', 'FILEDEF'], + }, + 'nqrnl': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'offsite': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'opertim': { + 'type': 'int', + 'required': False, + }, + 'opndlim': { + 'type': 'int', + 'required': False, + }, + 'parmerr': { + 'type': 'str', + 'required': False, + 'choices': ['INTERACT', 'IGNORE', 'ABEND'], + }, + 'pcdsasze': { + 'type': 'int', + 'required': False, + }, + 'pdi': { + 'type': 'int', + 'required': False, + }, + 'pdir': { + 'type': 'str', + 'required': False, + }, + 'pgaictlg': { + 'type': 'str', + 'required': False, + 'choices': ['MODIFY', 'NONE', 'ALL'], + }, + 'pgaiexit': { + 'type': 'str', + 'required': False, + }, + 'pgaipgm': { + 'type': 'str', + 'required': False, + 'choices': ['INACTIVE', 'ACTIVE'], + }, + 'pgchain': { + 'type': 'str', + 'required': 
False, + }, + 'pgcopy': { + 'type': 'str', + 'required': False, + }, + 'pgpurge': { + 'type': 'str', + 'required': False, + }, + 'pgret': { + 'type': 'str', + 'required': False, + }, + 'pltpi': { + 'type': 'str', + 'required': False, + }, + 'pltpisec': { + 'type': 'str', + 'required': False, + 'choices': ['NONE', 'CMDSEC', 'RESSEC', 'ALL'], + }, + 'pltpiusr': { + 'type': 'str', + 'required': False, + }, + 'pltsd': { + 'type': 'str', + 'required': False, + }, + 'prgdlay': { + 'type': 'int', + 'required': False, + }, + 'print': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES', 'PA1', 'PA2', 'PA3'], + }, + 'prtyage': { + 'type': 'int', + 'required': False, + }, + 'prvmod': { + 'type': 'str', + 'required': False, + }, + 'psbchk': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'psdint': { + 'type': 'int', + 'required': False, + }, + 'pstype': { + 'type': 'str', + 'required': False, + 'choices': ['SNPS', 'MNPS', 'NOPS'], + }, + 'pudsasze': { + 'type': 'str', + 'required': False, + }, + 'pvdelay': { + 'type': 'int', + 'required': False, + }, + 'quiestim': { + 'type': 'int', + 'required': False, + }, + 'racfsync': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES', 'CPSM'], + }, + 'ramax': { + 'type': 'int', + 'required': False, + }, + 'rapool': { + 'type': 'str', + 'required': False, + }, + 'rdsasze': { + 'type': 'str', + 'required': False, + }, + 'rentpgm': { + 'type': 'str', + 'required': False, + 'choices': ['PROTECT', 'NOPROTECT'], + }, + 'resoverrides': { + 'type': 'str', + 'required': False, + }, + 'resp': { + 'type': 'str', + 'required': False, + 'choices': ['FME', 'RRN'], + }, + 'ressec': { + 'type': 'str', + 'required': False, + 'choices': ['ASIS', 'ALWAYS'], + }, + 'rls': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'rlstolsr': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'rmtran': { + 'type': 'str', + 'required': False, + }, + 'rrms': { + 
'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'rst': { + 'type': 'str', + 'required': False, + }, + 'rstsignoff': { + 'type': 'str', + 'required': False, + 'choices': ['NOFORCE', 'FORCE'], + }, + 'rstsigntime': { + 'type': 'int', + 'required': False, + }, + 'ruwapool': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'sdsasze': { + 'type': 'str', + 'required': False, + }, + 'sdtmemlimit': { + 'type': 'str', + 'required': False, + }, + 'sdtran': { + 'type': 'str', + 'required': False, + }, + 'sec': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'secprfx': { + 'type': 'str', + 'required': False, + }, + 'sit': { + 'type': 'str', + 'required': False + }, + 'skrxxxx': { + 'type': 'dict', + 'required': False, + }, + 'snpreset': { + 'type': 'str', + 'required': False, + 'choices': ['UNIQUE', 'SHARED'], + }, + 'snscope': { + 'type': 'str', + 'required': False, + 'choices': ['NONE', 'CICS', 'MVSIMAGE', 'SYSPLEX'], + }, + 'sotuning': { + 'type': 'str', + 'required': False, + 'choices': ['YES', 520], + }, + 'spctr': { + 'type': 'str', + 'required': False, + }, + 'spctrxx': { + 'type': 'dict', + 'required': False, + }, + 'spool': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'srbsvc': { + 'type': 'int', + 'required': False, + }, + 'srt': { + 'type': 'str', + 'required': False, + }, + 'srvercp': { + 'type': 'str', + 'required': False, + }, + 'sslcache': { + 'type': 'str', + 'required': False, + 'choices': ['CICS', 'SYSPLEX'], + }, + 'ssldelay': { + 'type': 'int', + 'required': False, + }, + 'start': { + 'type': 'str', + 'required': False, + 'choices': ['INITIAL', 'AUTO', 'COLD', 'STANDBY', '(INITIAL, ALL)', '(AUTO, ALL)', + '(COLD, ALL)', '(STANDBY, ALL)'], + }, + 'starter': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'stateod': { + 'type': 'int', + 'required': False, + }, + 'statint': { + 'type': 'int', + 'required': False, + }, + 
'statrcd': { + 'type': 'str', + 'required': False, + 'choices': ['OFF', 'ON'], + }, + 'stgprot': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'stgrcvy': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'stntr': { + 'type': 'str', + 'required': False, + }, + 'stntrxx': { + 'type': 'dict', + 'required': False, + }, + 'subtsks': { + 'type': 'int', + 'required': False, + 'choices': [0, 1], + }, + 'suffix': { + 'type': 'str', + 'required': False, + }, + 'sydumax': { + 'type': 'int', + 'required': False, + }, + 'sysidnt': { + 'type': 'str', + 'required': False, + }, + 'systr': { + 'type': 'str', + 'required': False, + 'choices': ['ON', 'OFF'], + }, + 'takeovr': { + 'type': 'str', + 'required': False, + 'choices': ['MANUAL', 'AUTO', 'COMMAND'], + }, + 'tbexits': { + 'type': 'str', + 'required': False, + }, + 'tcp': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'tcpip': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'tcsactn': { + 'type': 'str', + 'required': False, + 'choices': ['NONE', 'UNBIND', 'FORCE'], + }, + 'tcswait': { + 'type': 'str', + 'required': False, + }, + 'tct': { + 'type': 'str', + 'required': False, + }, + 'tctuakey': { + 'type': 'str', + 'required': False, + 'choices': ['USER', 'CICS'], + }, + 'tctualoc': { + 'type': 'str', + 'required': False, + 'choices': ['BELOW', 'ANY'], + }, + 'td': { + 'type': 'str', + 'required': False, + }, + 'tdintra': { + 'type': 'str', + 'required': False, + 'choices': ['NOEMPTY', 'EMPTY'], + }, + 'traniso': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'trap': { + 'type': 'str', + 'required': False, + 'choices': ['OFF', 'ON'], + }, + 'trdumax': { + 'type': 'int', + 'required': False, + }, + 'trtabsz': { + 'type': 'int', + 'required': False, + }, + 'trtransz': { + 'type': 'int', + 'required': False, + }, + 'trtranty': { + 'type': 'str', + 'required': False, + 'choices': ['TRAN', 'ALL'], 
+ }, + 'ts': { + 'type': 'str', + 'required': False, + }, + 'tsmainlimit': { + 'type': 'str', + 'required': False, + }, + 'tst': { + 'type': 'str', + 'required': False, + }, + 'udsasze': { + 'type': 'str', + 'required': False, + }, + 'uownetql': { + 'type': 'str', + 'required': False, + }, + 'usertr': { + 'type': 'str', + 'required': False, + 'choices': ['ON', 'OFF'], + }, + 'usrdelay': { + 'type': 'int', + 'required': False, + }, + 'ussconfig': { + 'type': 'str', + 'required': False, + }, + 'usshome': { + 'type': 'str', + 'required': False, + }, + 'vtam': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'vtprefix': { + 'type': 'str', + 'required': False + }, + 'webdelay': { + 'type': 'str', + 'required': False, + }, + 'wlmhealth': { + 'type': 'str', + 'required': False, + }, + 'wrkarea': { + 'type': 'int', + 'required': False, + }, + 'xappc': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'xcfgroup': { + 'type': 'str', + 'required': False, + }, + 'xcmd': { + 'type': 'str', + 'required': False, + }, + 'xdb2': { + 'type': 'str', + 'required': False, + }, + 'xdct': { + 'type': 'str', + 'required': False, + }, + 'xfct': { + 'type': 'str', + 'required': False, + }, + 'xhfs': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'xjct': { + 'type': 'str', + 'required': False, + }, + 'xlt': { + 'type': 'str', + 'required': False, + }, + 'xpct': { + 'type': 'str', + 'required': False, + }, + 'xppt': { + 'type': 'str', + 'required': False, + }, + 'xpsb': { + 'type': 'str', + 'required': False, + }, + 'xptkt': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'xres': { + 'type': 'str', + 'required': False, + }, + 'xrf': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 'YES'], + }, + 'xtran': { + 'type': 'str', + 'required': False, + }, + 'xtst': { + 'type': 'str', + 'required': False, + }, + 'xuser': { + 'type': 'str', + 'required': False, + 'choices': ['NO', 
'YES'] + }, + 'zosmoninterval': { + 'type': 'int', + 'required': False, + }, + 'zossosnewtcb': { + 'type': 'str', + 'required': False, + 'choices': ['DELAY', 'NODELAY'] + }, + 'zossos24unalloc': { + 'type': 'str', + 'required': False, + }, + 'zossos31unalloc': { + 'type': 'str', + 'required': False, + }, + 'zossos64unalloc': { + 'type': 'int', + 'required': False, + } + } + }, + } + + +def main(): + AnsibleRegionJCLModule().main() + + +if __name__ == '__main__': + main() diff --git a/plugins/modules/stop_region.py b/plugins/modules/stop_region.py new file mode 100644 index 00000000..75bd4872 --- /dev/null +++ b/plugins/modules/stop_region.py @@ -0,0 +1,336 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +DOCUMENTATION = r''' +--- +module: stop_region +short_description: Stop a CICS region +description: + - Stop a CICS region by issuing a CEMT PERFORM SHUTDOWN command, or by canceling the job through the C(jobs.cancel) utility provided by + Z Open Automation Utilities (ZOAU). You can choose the shutdown mode from NORMAL, IMMEDIATE, or CANCEL. + - The O(job_id), O(job_name), or both can be used to shut down a CICS region. If multiple jobs are running with the same name, the O(job_id) is required. + - During a NORMAL or IMMEDIATE shutdown, a shutdown assist transaction should run to enable CICS to shut down in a controlled manner. + By default, the CICS-supplied shutdown assist transaction, CESD is used. You can specify a custom shutdown assist transaction in the + SDTRAN system initialization parameter. The task runs until the region has successfully shut down, or until the shutdown fails. + - You must have a console installed in the CICS region so that the stop_region module can communicate with CICS.
To define a console, + you must install a terminal with the CONSNAME attribute set to your TSO user ID. For detailed instructions, see + L(Defining TSO users as console devices,https://www.ibm.com/docs/en/cics-ts/latest?topic=cics-defining-tso-users-as-console-devices). + Add your console definition into one of the resource lists defined on the GRPLIST system initialization parameter so that it gets + installed into the CICS region. + Alternatively, you can use a DFHCSDUP script to update an existing CSD. This function is provided by the csd module. + - You can specify a timeout, in seconds, for CICS shutdown processing. After a request to stop CICS is issued, if CICS shutdown processing is not + completed when this timeout is reached, the module completes in a failed state. By default, the stop_region module does not use a timeout, that is, + the O(timeout) parameter assumes a value of -1. +version_added: 2.1.0 +author: + - Kiera Bennett (@KieraBennett) +options: + job_id: + description: + - Identifies the job ID belonging to the running CICS region. + - The stop_region module uses this job ID to identify the state of the CICS region and shut it down. + type: str + required: false + job_name: + description: + - Identifies the job name belonging to the running CICS region. + - The stop_region module uses this job name to identify the state of the CICS region and shut it down. + - The O(job_name) must be unique; if multiple jobs with the same name are running, use O(job_id). + type: str + required: false + mode: + description: + - Specify the type of shutdown to be executed on the CICS region. + - Specify C(normal) to perform a normal shutdown. This instructs the stop_region module to issue a CEMT PERFORM SHUTDOWN command. + - Specify C(immediate) to perform an immediate shutdown. This instructs the stop_region module to issue a CEMT PERFORM SHUTDOWN IMMEDIATE command. + - Specify C(cancel) to cancel the CICS region. 
This instructs the stop_region module to use ZOAU's C(jobs.cancel) utility to process the request. + type: str + required: false + default: normal + choices: + - normal + - immediate + - cancel + sdtran: + description: + - The 4-character identifier of the shutdown assist transaction. + - The default shutdown transaction, if neither SDTRAN nor NOSDTRAN is specified, is CESD. + type: str + required: false + no_sdtran: + description: + - No shutdown assist transaction is to be run at CICS shutdown. + type: bool + default: false + required: false + timeout: + description: + - The maximum time, in seconds, to wait for CICS shutdown processing to complete. + - Specify -1 to exclude a timeout. + type: int + default: -1 + required: false +''' + + +EXAMPLES = r''' +- name: "Stop CICS region using job ID" + ibm.ibm_zos_cics.stop_region: + job_id: JOB12345 + +- name: "Stop CICS region immediately using job ID" + ibm.ibm_zos_cics.stop_region: + job_id: JOB12354 + mode: immediate + +- name: "Stop CICS region using job name and job ID" + ibm.ibm_zos_cics.stop_region: + job_id: JOB12354 + job_name: MYREG01 + +- name: "Stop CICS region using job name" + ibm.ibm_zos_cics.stop_region: + job_name: ANS1234 + mode: normal + +- name: "Cancel CICS region using job name" + ibm.ibm_zos_cics.stop_region: + job_name: ANS1234 + mode: cancel +''' + +RETURN = r''' +changed: + description: True if the PERFORM SHUTDOWN or CANCEL command was executed. + returned: always + type: bool +failed: + description: True if the Ansible task failed, otherwise False. + returned: always + type: bool +executions: + description: A list of program executions performed during the Ansible task. + returned: always + type: list + elements: dict + contains: + name: + description: A human-readable name for the program execution. + type: str + returned: always + rc: + description: The return code for the program execution. 
+ type: int + returned: always + return: + description: The standard output returned by the program execution. + type: dict + returned: always + contains: + changed: + description: True if the state was changed, otherwise False. + returned: always + type: bool + failed: + description: True if the module failed, otherwise False. + returned: always + type: bool + jobs: + description: The output information for a list of jobs matching the specified criteria. + type: list + returned: on zos_job_query module execution + elements: dict + contains: + job_id: + description: Unique job identifier assigned to the job by JES. + type: str + job_name: + description: The name of the batch job. + type: str + owner: + description: The owner who ran the job. + type: str + ret_code: + description: + Return code output collected from the job log. + type: dict + contains: + msg: + description: + Return code or abend resulting from the job submission. + type: str + msg_code: + description: + Return code extracted from the `msg` so that it can be evaluated. + For example, ABEND(S0C4) yields "S0C4". + type: str + msg_txt: + description: + Returns additional information related to the job. + type: str + code: + description: + Return code converted to an integer value (when possible). + type: int + steps: + description: + Series of JCL steps that were executed and their return codes. + type: list + elements: dict + contains: + step_name: + description: + Name of the step shown as "was executed" in the DD section. + type: str + step_cc: + description: + The CC returned for this step in the DD section. + type: int + message: + description: Message returned on failure. + returned: on zos_job_query module execution + type: str + content: + description: The resulting text from the command submitted. 
+ returned: on zos_operator module execution + type: list + cmd: + description: The operator command that has been executed + returned: on zos_operator module execution + type: str + rc: + description: The return code from the operator command + returned: on zos_operator module execution + type: int + max_rc: + description: The maximum return code from the TSO status command + returned: on zos_tso_command module execution + type: int + output: + description: The output from the TSO command. + returned: on zos_tso_command module execution + type: list + elements: dict + contains: + command: + description: The executed TSO command. + returned: always + type: str + rc: + description: The return code from the executed TSO command. + returned: always + type: int + content: + description: The response resulting from the execution of the TSO command. + returned: always + type: list + lines: + description: The line number of the content. + returned: always + type: int + +msg: + description: A string containing an error message if applicable. 
+ returned: always + type: str +''' + +from ansible.module_utils.basic import AnsibleModule + +CANCEL = 'cancel' +IMMEDIATE = 'immediate' +JOB_ID = 'job_id' +JOB_NAME = 'job_name' +MODE = 'mode' +NORMAL = 'normal' +NO_SDTRAN = 'no_sdtran' +SDTRAN = 'sdtran' +TIMEOUT = 'timeout' +TIMEOUT_DEFAULT = -1 + + +class AnsibleStopCICSModule(object): + + def __init__(self): + self._module = AnsibleModule( + argument_spec=self.init_argument_spec(), + mutually_exclusive=[(SDTRAN, NO_SDTRAN)], + required_one_of=[(JOB_ID, JOB_NAME)], + ) + self.failed = False + self.msg = "" + + def main(self): + if self._module.params.get(SDTRAN): + self._validate_sdtran(self._module.params[SDTRAN]) + if not self._module.params.get(JOB_ID) and not self._module.params.get(JOB_NAME): + self._fail("At least one of {0} or {1} must be specified".format( + JOB_ID, JOB_NAME)) + self.result = self.get_result() + self._module.exit_json(**self.result) + + def _validate_sdtran(self, transaction): # type: (str) -> None + if len(transaction) > 4: + self._fail( + "Value: {0}, is invalid. 
SDTRAN value must be 1-4 characters.".format(transaction)) + + def _fail(self, msg): # type: (str) -> None + self.failed = True + self.msg = msg + self.result = self.get_result() + self._module.fail_json(**self.result) + + def get_result(self): # type: () -> dict + return { + "changed": False, + "failed": self.failed, + "executions": [], + "msg": self.msg + } + + def init_argument_spec(self): + return { + JOB_ID: { + 'type': 'str', + 'required': False, + }, + JOB_NAME: { + 'type': 'str', + 'required': False, + }, + MODE: { + 'type': 'str', + 'required': False, + 'default': NORMAL, + 'choices': [NORMAL, IMMEDIATE, CANCEL], + }, + SDTRAN: { + 'type': 'str', + 'required': False, + }, + NO_SDTRAN: { + 'type': 'bool', + 'required': False, + 'default': False, + }, + TIMEOUT: { + 'type': 'int', + 'required': False, + 'default': TIMEOUT_DEFAULT, + } + } + + +def main(): + # No nested __name__ check here: callers (tests, wrappers) must be able to invoke main() directly. + AnsibleStopCICSModule().main() + + +if __name__ == '__main__': + main() diff --git a/plugins/modules/td_intrapartition.py b/plugins/modules/td_intrapartition.py new file mode 100644 index 00000000..3aa278a9 --- /dev/null +++ b/plugins/modules/td_intrapartition.py @@ -0,0 +1,221 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +DOCUMENTATION = r""" +--- +module: td_intrapartition +short_description: Create and remove the CICS transient data intrapartition data set +description: + - Create and remove the L(transient data intrapartition,https://www.ibm.com/docs/en/cics-ts/latest?topic=data-defining-intrapartition-set) + data set used by a CICS® region. This data set holds all the data for intrapartition queues. + - You can use this module when provisioning or de-provisioning a CICS region.
+ - Use the O(state) option to specify the intended state for the transient data + intrapartition data set. For example, use O(state=initial) to create a transient data + intrapartition data set if it doesn't exist. +author: Andrew Twydell (@andrewtwydell) +version_added: 2.1.0 +extends_documentation_fragment: + - ibm.ibm_zos_cics.td_intrapartition +""" + + +EXAMPLES = r""" +- name: Initialize a transient data intrapartition data set by using the templated location + ibm.ibm_zos_cics.td_intrapartition: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: "initial" + +- name: Initialize a user specified transient data intrapartition data set + ibm.ibm_zos_cics.td_intrapartition: + region_data_sets: + dfhintra: + dsn: "REGIONS.ABCD0001.DFHINTRA" + state: "initial" + +- name: Initialize a large transient data intrapartition data set by using the templated location + ibm.ibm_zos_cics.td_intrapartition: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + space_primary: 50 + space_type: "M" + state: "initial" + +- name: Retain the existing state of a transient data intrapartition data set defined by the template + ibm.ibm_zos_cics.td_intrapartition: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: "warm" + +- name: Retain the existing state of a user specified transient data intrapartition data set + ibm.ibm_zos_cics.td_intrapartition: + region_data_sets: + dfhintra: + dsn: "REGIONS.ABCD0001.DFHINTRA" + state: "warm" + +- name: Delete a transient data intrapartition data set defined by the template + ibm.ibm_zos_cics.td_intrapartition: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: "absent" + +- name: Delete a user specified transient data intrapartition data set + ibm.ibm_zos_cics.td_intrapartition: + region_data_sets: + dfhintra: + dsn: "REGIONS.ABCD0001.DFHINTRA" + state: "absent" +""" + + +RETURN = r""" +changed: + description: True if the
state was changed, otherwise False. + returned: always + type: bool +failed: + description: True if the Ansible task failed, otherwise False. + returned: always + type: bool +start_state: + description: + - The state of the transient data intrapartition data set before the Ansible task runs. + returned: always + type: dict + contains: + data_set_organization: + description: The organization of the data set at the start of the Ansible task. + returned: always + type: str + sample: "VSAM" + exists: + description: True if the specified transient data intrapartition data set exists. + type: bool + returned: always +end_state: + description: The state of the transient data intrapartition data set at the end of the Ansible task. + returned: always + type: dict + contains: + data_set_organization: + description: The organization of the data set at the end of the Ansible task. + returned: always + type: str + sample: "VSAM" + exists: + description: True if the specified transient data intrapartition data set exists. + type: bool + returned: always +executions: + description: A list of program executions performed during the Ansible task. + returned: always + type: list + elements: dict + contains: + name: + description: A human-readable name for the program execution. + type: str + returned: always + rc: + description: The return code for the program execution. + type: int + returned: always + stdout: + description: The standard output stream returned from the program execution. + type: str + returned: always + stderr: + description: The standard error stream returned from the program execution. 
+ type: str + returned: always +msg: + description: A string containing an error message if applicable + returned: always + type: str +""" + +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._data_set_utils import _build_idcams_define_cmd +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._data_set import ( + MEGABYTES, + KILOBYTES, + RECORDS, + CYLINDERS, + TRACKS, + REGION_DATA_SETS, + SPACE_PRIMARY, + SPACE_SECONDARY, + SPACE_TYPE, + DataSet +) +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._td_intrapartition import ( + _get_idcams_cmd_intra +) + + +DSN = "dfhintra" +SPACE_PRIMARY_DEFAULT = 100 +SPACE_SECONDARY_DEFAULT = 10 +SPACE_OPTIONS = [KILOBYTES, MEGABYTES, RECORDS, CYLINDERS, TRACKS] + + +class AnsibleTDIntrapartitionModule(DataSet): + def __init__(self): + super(AnsibleTDIntrapartitionModule, self).__init__(SPACE_PRIMARY_DEFAULT, SPACE_SECONDARY_DEFAULT) + self.name = self.region_param[DSN]["dsn"].upper() + self.expected_data_set_organization = "VSAM" + + def _get_arg_spec(self): # type: () -> dict + arg_spec = super(AnsibleTDIntrapartitionModule, self)._get_arg_spec() + + arg_spec[SPACE_PRIMARY].update({ + "default": SPACE_PRIMARY_DEFAULT + }) + arg_spec[SPACE_SECONDARY].update({ + "default": SPACE_SECONDARY_DEFAULT + }) + arg_spec[SPACE_TYPE].update({ + "default": RECORDS, + "choices": SPACE_OPTIONS, + }) + arg_spec[REGION_DATA_SETS]["options"].update({ + DSN: { + "type": "dict", + "required": False, + "options": { + "dsn": { + "type": "str", + "required": False, + }, + }, + }, + }) + + return arg_spec + + def get_arg_defs(self): # type: () -> dict + defs = super().get_arg_defs() + defs[REGION_DATA_SETS]["options"][DSN]["options"]["dsn"].update({ + "arg_type": "data_set_base" + }) + defs[REGION_DATA_SETS]["options"][DSN]["options"]["dsn"].pop("type") + return defs + + def create_data_set(self): # type: () -> None + create_cmd = _build_idcams_define_cmd(_get_idcams_cmd_intra(self.get_data_set())) + 
super().build_vsam_data_set(create_cmd) + + +def main(): + AnsibleTDIntrapartitionModule().main() + + +if __name__ == "__main__": + main() diff --git a/plugins/modules/transaction_dump.py b/plugins/modules/transaction_dump.py new file mode 100644 index 00000000..23984d76 --- /dev/null +++ b/plugins/modules/transaction_dump.py @@ -0,0 +1,290 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +DOCUMENTATION = r''' +--- +module: transaction_dump +short_description: Allocate transaction dump data sets +description: + - Allocates the two L(transaction dump ,https://www.ibm.com/docs/en/cics-ts/latest?topic=sets-defining-transaction-dump-data) + data sets used by a CICS® region. + - The two data sets are referred to as transaction dump data set A (DFHDMPA) and transaction dump data set B (DFHDMPB). 
+author: Thomas Foyle (@tom-foyle) +version_added: 2.1.0 +extends_documentation_fragment: + - ibm.ibm_zos_cics.transaction_dump +''' + + +EXAMPLES = r""" +- name: Allocate transaction dump data set A (implicit) by using the templated location + ibm.ibm_zos_cics.transaction_dump: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: initial + +- name: Allocate a user specified data set as transaction dump data set A (implicit) + ibm.ibm_zos_cics.transaction_dump: + region_data_sets: + dfhdmpa: + dsn: "REGIONS.ABCD0001.DFHDMPA" + state: initial + +- name: Allocate transaction dump data set A by using the templated location + ibm.ibm_zos_cics.transaction_dump: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: initial + destination: A + +- name: Allocate a user specified data set as transaction dump data set A + ibm.ibm_zos_cics.transaction_dump: + region_data_sets: + dfhdmpa: + dsn: "REGIONS.ABCD0001.DFHDMPA" + state: initial + destination: A + +- name: Allocate transaction dump data set B by using the templated location + ibm.ibm_zos_cics.transaction_dump: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: initial + destination: B + +- name: Allocate a user specified data set as transaction dump data set B + ibm.ibm_zos_cics.transaction_dump: + region_data_sets: + dfhdmpb: + dsn: "REGIONS.ABCD0001.DFHDMPB" + state: initial + destination: B + +- name: Retain the existing state of transaction dump data set A (implicit) defined by the template + ibm.ibm_zos_cics.transaction_dump: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: "warm" + +- name: Retain the existing state of a user specified transaction dump data set A (implicit) + ibm.ibm_zos_cics.transaction_dump: + region_data_sets: + dfhdmpa: + dsn: "REGIONS.ABCD0001.DFHDMPA" + state: "warm" + +- name: Retain the existing state of transaction dump data set B defined by the template + 
ibm.ibm_zos_cics.transaction_dump: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: "warm" + destination: B + +- name: Retain the existing state of a user specified transaction dump data set B + ibm.ibm_zos_cics.transaction_dump: + region_data_sets: + dfhdmpb: + dsn: "REGIONS.ABCD0001.DFHDMPB" + state: "warm" + destination: B + +- name: Delete transaction dump data set A (implicit) defined by the template + ibm.ibm_zos_cics.transaction_dump: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: absent + +- name: Delete a user specified transaction dump data set A (implicit) + ibm.ibm_zos_cics.transaction_dump: + region_data_sets: + dfhdmpa: + dsn: "REGIONS.ABCD0001.DFHDMPA" + state: absent + +- name: Delete transaction dump data set B defined by the template + ibm.ibm_zos_cics.transaction_dump: + region_data_sets: + template: "REGIONS.ABCD0001.<< data_set_name >>" + state: absent + destination: B + +- name: Delete a user specified transaction dump data set B + ibm.ibm_zos_cics.transaction_dump: + region_data_sets: + dfhdmpb: + dsn: "REGIONS.ABCD0001.DFHDMPB" + state: absent + destination: B +""" + + +RETURN = r""" +changed: + description: True if the state was changed, otherwise False. + returned: always + type: bool +failed: + description: True if the Ansible task failed, otherwise False. + returned: always + type: bool +start_state: + description: + - The state of the transaction dump data set before the Ansible task runs. + returned: always + type: dict + contains: + data_set_organization: + description: The organization of the data set at the start of the Ansible task. + returned: always + type: str + sample: "Sequential" + exists: + description: True if the specified transaction dump data set exists. + type: bool + returned: always +end_state: + description: The state of the transaction dump data set at the end of the Ansible task. 
+ returned: always + type: dict + contains: + data_set_organization: + description: The organization of the data set at the end of the Ansible task. + returned: always + type: str + sample: "Sequential" + exists: + description: True if the specified transaction dump data set exists. + type: bool + returned: always +executions: + description: A list of program executions performed during the Ansible task. + returned: always + type: list + elements: dict + contains: + name: + description: A human-readable name for the program execution. + type: str + returned: always + rc: + description: The return code for the program execution. + type: int + returned: always + stdout: + description: The standard output stream returned from the program execution. + type: str + returned: always + stderr: + description: The standard error stream returned from the program execution. + type: str + returned: always +msg: + description: A string containing an error message if applicable + returned: always + type: str +""" + +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._data_set import ( + DESTINATION, + DESTINATION_OPTIONS, + DESTINATION_DEFAULT_VALUE, + MEGABYTES, + REGION_DATA_SETS, + SPACE_PRIMARY, + SPACE_SECONDARY, + SPACE_TYPE, + DataSet +) +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._transaction_dump import ( + _build_seq_data_set_definition_transaction_dump +) + + +DSN_A = "dfhdmpa" +DSN_B = "dfhdmpb" +SPACE_PRIMARY_DEFAULT = 20 +SPACE_SECONDARY_DEFAULT = 4 + + +class AnsibleTransactionDumpModule(DataSet): + def __init__(self): # type: () -> None + self.ds_destination = "" + super(AnsibleTransactionDumpModule, self).__init__(SPACE_PRIMARY_DEFAULT, SPACE_SECONDARY_DEFAULT) + self.ds_destination = DSN_B if self.destination == "B" else DSN_A + self.name = self.region_param[self.ds_destination]["dsn"].upper() + self.expected_data_set_organization = "Sequential" + + def _get_arg_spec(self): # type: () -> dict + arg_spec = 
super(AnsibleTransactionDumpModule, self)._get_arg_spec() + + arg_spec.update({ + DESTINATION: { + "type": "str", + "choices": DESTINATION_OPTIONS, + "default": DESTINATION_DEFAULT_VALUE + } + }) + + arg_spec[SPACE_PRIMARY].update({ + "default": SPACE_PRIMARY_DEFAULT + }) + arg_spec[SPACE_SECONDARY].update({ + "default": SPACE_SECONDARY_DEFAULT + }) + arg_spec[SPACE_TYPE].update({ + "default": MEGABYTES + }) + arg_spec[REGION_DATA_SETS]["options"].update({ + DSN_A: { + "type": "dict", + "required": False, + "options": { + "dsn": { + "type": "str", + "required": False, + }, + }, + }, + DSN_B: { + "type": "dict", + "required": False, + "options": { + "dsn": { + "type": "str", + "required": False, + }, + }, + }, + }) + + return arg_spec + + def get_arg_defs(self): # type: () -> dict + defs = super().get_arg_defs() + defs[REGION_DATA_SETS]["options"][DSN_A]["options"]["dsn"].update({ + "arg_type": "data_set_base" + }) + defs[REGION_DATA_SETS]["options"][DSN_B]["options"]["dsn"].update({ + "arg_type": "data_set_base" + }) + defs[REGION_DATA_SETS]["options"][DSN_A]["options"]["dsn"].pop("type") + defs[REGION_DATA_SETS]["options"][DSN_B]["options"]["dsn"].pop("type") + return defs + + def create_data_set(self): # type: () -> None + definition = _build_seq_data_set_definition_transaction_dump(self.get_data_set()) + super().build_seq_data_set(self.ds_destination, definition) + + +def main(): + AnsibleTransactionDumpModule().main() + + +if __name__ == '__main__': + main() diff --git a/plugins/plugin_utils/_module_action_plugin.py b/plugins/plugin_utils/_module_action_plugin.py new file mode 100644 index 00000000..544a4444 --- /dev/null +++ b/plugins/plugin_utils/_module_action_plugin.py @@ -0,0 +1,169 @@ +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +# FOR INTERNAL USE IN THE COLLECTION ONLY. 
+ +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from ansible.plugins.action import ActionBase + +REGION_DS_KEYS = ["dfhgcd", "dfhlcd", "dfhintra", "dfhlrq", "dfhtemp", "dfhauxt", "dfhbuxt", "dfhdmpa", "dfhdmpb", "dfhcsd", "dfhstart"] +CICS_DS_KEYS = ["sdfhload", "sdfhauth", "sdfhlic"] +LE_DS_KEYS = ["sceecics", "sceerun", "sceerun2"] +CPSM_DS_KEYS = ["seyuauth", "seyuload"] +LIBRARY_KEYS = ["steplib", "dfhrpl"] + + +class _DataSetActionPlugin(ActionBase): + def _run(self, ds_name, module_name, cics_data_sets_required, tmp=None, task_vars=None): + super(_DataSetActionPlugin, self).run(tmp, task_vars) + self.module_args = self._task.args.copy() + + return_structure = { + "failed": False, + "changed": False, + "msg": "", + "executions": [], + "start_state": {"data_set_organization": "NONE", "exists": False}, + "end_state": {"data_set_organization": "NONE", "exists": False}, + } + + try: + self._process_module_args( + self.module_args, + self._templar, + ds_name, + task_vars, + cics_data_sets_required, + ) + except (KeyError, ValueError) as e: + return_structure.update({ + "failed": True, + "changed": False, + "msg": e.args[0], + }) + else: + return_structure.update( + self._execute_module( + module_name="ibm.ibm_zos_cics.{0}".format(module_name), + module_args=self.module_args, + task_vars=task_vars, + tmp=tmp, + ) + ) + + return return_structure + + def _process_module_args(self, module_args, _templar, ds_name, task_vars, cics_data_sets_required): + _process_module_args(module_args, _templar, ds_name, task_vars, cics_data_sets_required) + + +def _process_module_args(module_args, _templar, ds_name, task_vars, cics_data_sets_required): + _process_region_data_set_args(module_args, _templar, ds_name, task_vars) + _remove_region_data_set_args(module_args, ds_name) + + if cics_data_sets_required: + _process_libraries_args(module_args, _templar, task_vars, "cics_data_sets", "sdfhload") + _remove_cics_data_set_args(module_args, 
"sdfhload") + else: + if module_args.get("cics_data_sets"): + del module_args["cics_data_sets"] + + if module_args.get("le_data_sets"): + del module_args["le_data_sets"] + + if module_args.get("cpsm_data_sets"): + del module_args["cpsm_data_sets"] + + +def _check_region_override(module_args, ds_name): + return module_args["region_data_sets"].get(ds_name, {}).get("dsn") is not None + + +def _check_library_override(module_args, lib_type, lib_ds_name): + if module_args[lib_type].get(lib_ds_name): + return True + else: + return False + + +def _remove_region_data_set_args(module_args, ds_name): + for region_key in list(module_args["region_data_sets"]): + if region_key in REGION_DS_KEYS and region_key != ds_name: + del module_args["region_data_sets"][region_key] + + +def _remove_cics_data_set_args(module_args, ds_name): + for cics_key in list(module_args["cics_data_sets"]): + if cics_key in CICS_DS_KEYS and cics_key != ds_name: + del module_args["cics_data_sets"][cics_key] + + +def _process_region_data_set_args(module_args, _templar, ds_name, task_vars): + if not module_args.get("region_data_sets"): + raise KeyError("Required argument region_data_sets not found") + + if not _check_region_override(module_args, ds_name): + if _check_template(module_args, "region_data_sets"): + module_args["region_data_sets"].update({ + ds_name: { + "dsn": _template_dsn( + _templar=_templar, + task_vars=task_vars, + var_name="data_set_name", + replace_val=ds_name.upper(), + template=module_args["region_data_sets"]["template"], + ) + }} + ) + else: + raise KeyError("No template or data set override found for {0}".format(ds_name)) + return _validate_data_set_length(module_args["region_data_sets"][ds_name]["dsn"]) + + +def _validate_list_of_data_set_lengths(data_set_list): + for data_set in data_set_list: + _validate_data_set_length(data_set) + + +def _validate_data_set_length(data_set): + if len(data_set) > 44: + raise ValueError("Data set: {0} is longer than 44 characters.".format(data_set)) 
+ + +def _process_libraries_args(module_args, _templar, task_vars, lib_type, lib_ds_name): + if not _check_library_override(module_args, lib_type, lib_ds_name): + if _check_template(module_args, lib_type): + module_args[lib_type][lib_ds_name] = _template_dsn( + _templar=_templar, + task_vars=task_vars, + var_name="lib_name", + replace_val=lib_ds_name.upper(), + template=module_args[lib_type]["template"], + ) + else: + raise KeyError("No template or library override found for {0}".format(lib_ds_name)) + return _validate_data_set_length(module_args[lib_type][lib_ds_name]) + + +def _template_dsn(_templar, task_vars, var_name, replace_val, template): + cpy = task_vars.copy() + cpy.update({var_name: replace_val}) + return _templar.copy_with_new_env( + variable_start_string="<<", + variable_end_string=">>", + available_variables=cpy, + ).template(template) + + +def _check_template(module_args, arg_dict): + return module_args.get(arg_dict, {}).get("template") is not None + + +def _set_top_libraries_key(module_args, dict_key): + if module_args.get(dict_key) is None: + module_args[dict_key] = {"top_data_sets": []} + elif module_args[dict_key].get("top_data_sets") is None: + module_args[dict_key].update({"top_data_sets": []}) diff --git a/requirements.txt b/requirements.txt index 2c607ec2..1008c258 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ -# (c) Copyright IBM Corp. 2020,2021 +# (c) Copyright IBM Corp. 2020,2023 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) -requests==2.31.0 +requests==2.32.3 xmltodict==0.12.0 urllib3==1.26.18 \ No newline at end of file diff --git a/tests/integration/targets/cics_cmci/cmci-variables.yml.template b/tests/integration/targets/cics_cmci/cmci-variables.yml.template deleted file mode 100644 index 9d4fc064..00000000 --- a/tests/integration/targets/cics_cmci/cmci-variables.yml.template +++ /dev/null @@ -1,13 +0,0 @@ -# (c) Copyright IBM Corp. 
2020,2021 -# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) ---- -cmci_host: winmvs28.hursley.ibm.com -cmci_port: 28953 -cmci_secure_port: 28951 -cmci_user: __user__ -cmci_password: __password__ -cmci_context: CICSEX56 -cmci_scope_https: IYCWEMW2 -cmci_scope_http: IYCWEMW1 -cmci_scope_region_1: IYCWEML1 -cmci_scope_region_2: IYCWEMM1 \ No newline at end of file diff --git a/tests/integration/targets/cics_cmci/playbooks/cics_cmci_http.yml b/tests/integration/targets/cics_cmci/playbooks/cics_cmci_http.yml index 18f25e28..4109a6c6 100644 --- a/tests/integration/targets/cics_cmci/playbooks/cics_cmci_http.yml +++ b/tests/integration/targets/cics_cmci/playbooks/cics_cmci_http.yml @@ -1,4 +1,4 @@ -# (c) Copyright IBM Corp. 2020,2021 +# (c) Copyright IBM Corp. 2020,2023 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) --- - name: CMCI HTTP Integration Test diff --git a/tests/integration/targets/cics_cmci/playbooks/cics_cmci_https.yml b/tests/integration/targets/cics_cmci/playbooks/cics_cmci_https.yml index a0dbac02..8ff894d7 100644 --- a/tests/integration/targets/cics_cmci/playbooks/cics_cmci_https.yml +++ b/tests/integration/targets/cics_cmci/playbooks/cics_cmci_https.yml @@ -1,4 +1,4 @@ -# (c) Copyright IBM Corp. 2021 +# (c) Copyright IBM Corp. 2021,2023 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) --- - name: CMCI HTTPS Integration Test diff --git a/tests/integration/targets/cics_cmci/playbooks/cics_cmci_module_defaults_cmci.yml b/tests/integration/targets/cics_cmci/playbooks/cics_cmci_module_defaults_cmci.yml new file mode 100644 index 00000000..94512dcd --- /dev/null +++ b/tests/integration/targets/cics_cmci/playbooks/cics_cmci_module_defaults_cmci.yml @@ -0,0 +1,535 @@ +# (c) Copyright IBM Corp. 
2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) +--- +- name: CMCI Group Module_Default Integration Tests + hosts: localhost + gather_facts: false + vars: + csdgroup: "{{ cmci_program_name_1 }}" + program: "{{ cmci_program_name_1 }}" + program_2: "{{ cmci_program_name_2 }}" + program_filter: "{{ cmci_program_filter }}" + error_msg_27: "missing required arguments: cmci_host, cmci_port, context" + error_msg_38: CMCI request failed with response "NODATA" reason "1027" + error_msg: "{{ error_msg_27 if ansible_version.minor <= 11 else error_msg_38 }}" + + module_defaults: + group/ibm.ibm_zos_cics.cmci: + cmci_host: "{{ cmci_host }}" + cmci_port: "{{ cmci_port }}" + cmci_user: "{{ cmci_user }}" + cmci_password: "{{ cmci_password }}" + context: "{{ cmci_context }}" + scope: "{{ cmci_scope }}" + insecure: true + scheme: http + + tasks: + ################################################################################## + # Initial cleanup + ################################################################################## + - name: HTTP Disable program + delegate_to: localhost + cmci_update: + type: CICSProgram + cmci_host: "{{ cmci_host }}" + cmci_port: "{{ cmci_port }}" + cmci_user: "{{ cmci_user }}" + cmci_password: "{{ cmci_password }}" + context: "{{ cmci_context }}" + scope: "{{ cmci_scope }}" + insecure: true + scheme: http + attributes: + status: disabled + resources: + filter: + PROGRAM: "{{ program }}" + complex_filter: + and: + - attribute: PROGRAM + operator: "=" + value: "{{ program }}" + - or: + - attribute: USECOUNT + operator: "!=" + value: "0" + - attribute: USECOUNT + operator: LT + value: "1" + register: result + failed_when: > + 'cpsm_response' not in result or result.cpsm_response not in ['OK', 'NODATA'] + + + - name: HTTP Delete program + delegate_to: localhost + cmci_delete: + type: CICSProgram + cmci_host: "{{ cmci_host }}" + cmci_port: "{{ cmci_port }}" + cmci_user: "{{ cmci_user }}" + cmci_password: "{{ 
cmci_password }}" + context: "{{ cmci_context }}" + scope: "{{ cmci_scope }}" + insecure: true + scheme: http + resources: + filter: + PROGRAM: "{{ program }}" + register: result + failed_when: > + 'cpsm_response' not in result or result.cpsm_response not in ['OK', 'NODATA'] + + + - name: Delete progdef + delegate_to: localhost + ibm.ibm_zos_cics.cmci_delete: + type: CICSDefinitionProgram + cmci_host: "{{ cmci_host }}" + cmci_port: "{{ cmci_port }}" + cmci_user: "{{ cmci_user }}" + cmci_password: "{{ cmci_password }}" + context: "{{ cmci_context }}" + scope: "{{ cmci_scope }}" + insecure: true + scheme: http + resources: + filter: + NAME: "{{ program }}" + get_parameters: + - name: CSDGROUP + value: "{{ csdgroup }}" + register: result + failed_when: > + 'cpsm_response' not in result or result.cpsm_response not in ['OK', 'NODATA'] + + - name: Delete progdef 2 + delegate_to: localhost + ibm.ibm_zos_cics.cmci_delete: + type: CICSDefinitionProgram + cmci_host: "{{ cmci_host }}" + cmci_port: "{{ cmci_port }}" + cmci_user: "{{ cmci_user }}" + cmci_password: "{{ cmci_password }}" + context: "{{ cmci_context }}" + scope: "{{ cmci_scope }}" + insecure: true + scheme: http + resources: + filter: + NAME: "{{ program_2 }}" + get_parameters: + - name: CSDGROUP + value: "{{ csdgroup }}" + register: result + failed_when: > + 'cpsm_response' not in result or result.cpsm_response not in ['OK', 'NODATA'] + + ################################################################################## + # Main Test + ################################################################################## + - name: HTTP Create progdef + delegate_to: localhost + cmci_create: + type: CICSDefinitionProgram + attributes: + name: "{{ program }}" + csdgroup: "{{ csdgroup }}" + create_parameters: + - name: CSD + register: result + failed_when: false + + - name: Assert 1 2.12 + when: ansible_version.minor >= 12 + ansible.builtin.assert: + that: + - result is changed + - result.cpsm_response == 'OK' + - 
result.record_count == 1 + - result.records[0].name == program + + - name: Assert 1 2.11 + when: ansible_version.minor <= 11 + ansible.builtin.assert: + that: + - result.changed == false + - result.failed_when_result == false + - result.msg == error_msg + + - name: HTTP Update progdef + delegate_to: localhost + cmci_update: + type: CICSDefinitionProgram + attributes: + description: foo + resources: + filter: + NAME: "{{ program }}" + get_parameters: + - name: CSDGROUP + value: "{{ csdgroup }}" + register: result + failed_when: false + + - name: Assert 2 2.12 + when: ansible_version.minor >= 12 + ansible.builtin.assert: + that: + - result is changed + - result.cpsm_response == 'OK' + - result.record_count == 1 + - result.records[0].description == 'foo' + + - name: Assert 2 2.11 + when: ansible_version.minor <= 11 + ansible.builtin.assert: + that: + - result.changed == false + - result.failed_when_result == false + - result.msg == error_msg + + - name: HTTP Install program + delegate_to: localhost + cmci_action: + type: CICSDefinitionProgram + action_name: CSDINSTALL + resources: + filter: + NAME: "{{ program }}" + get_parameters: + - name: CSDGROUP + value: "{{ csdgroup }}" + register: result + failed_when: false + + - name: Assert 3 2.12 + when: ansible_version.minor >= 12 + ansible.builtin.assert: + that: + - result is changed + - result.cpsm_response == 'OK' + - result.record_count == 1 + + - name: Assert 3 2.11 + when: ansible_version.minor <= 11 + ansible.builtin.assert: + that: + - result.changed == false + - result.failed_when_result == false + - result.msg == error_msg + + - name: HTTP Check program was installed + delegate_to: localhost + cmci_get: + type: CICSProgram + resources: + filter: + PROGRAM: "{{ program }}" + retries: 3 # May take a while to install, so give it a chance! 
+ until: result is not failed + register: result + failed_when: false + + - name: Assert 4 2.12 + when: ansible_version.minor >= 12 + ansible.builtin.assert: + that: + - result is not changed + - result.cpsm_response == 'OK' + - result.record_count == 1 + - result.records[0].program == program + + - name: Assert 4 2.11 + when: ansible_version.minor <= 11 + ansible.builtin.assert: + that: + - result.changed == false + - result.failed_when_result == false + - result.msg == error_msg + + - name: HTTP Disable program + delegate_to: localhost + cmci_update: + type: CICSProgram + attributes: + status: disabled + resources: + filter: + PROGRAM: "{{ program }}" + complex_filter: + and: + - attribute: PROGRAM + operator: "=" + value: "{{ program }}" + - or: + - attribute: USECOUNT + operator: "!=" + value: "0" + - attribute: USECOUNT + operator: LT + value: "1" + register: result + failed_when: false + + - name: Assert 5 2.12 + when: ansible_version.minor >= 12 + ansible.builtin.assert: + that: + - result is changed + - result.cpsm_response == 'OK' + - result.record_count == 1 + - result.records[0].program == program + - result.records[0].status == 'DISABLED' + + - name: Assert 5 2.11 + when: ansible_version.minor <= 11 + ansible.builtin.assert: + that: + - result.changed == false + - result.failed_when_result == false + - result.msg == error_msg + + - name: HTTP Delete program + delegate_to: localhost + cmci_delete: + type: CICSProgram + resources: + filter: + PROGRAM: "{{ program }}" + register: result + failed_when: false + + - name: Assert 6 2.12 + when: ansible_version.minor >= 12 + ansible.builtin.assert: + that: + - result is changed + - result.cpsm_response == 'OK' + - result.record_count == 1 + - result.success_count == 1 + + - name: Assert 6 2.11 + when: ansible_version.minor <= 11 + ansible.builtin.assert: + that: + - result.changed == false + - result.failed_when_result == false + - result.msg == error_msg + + - name: HTTP Create progdef 2 + delegate_to: 
localhost + cmci_create: + type: CICSDefinitionProgram + attributes: + name: "{{ program_2 }}" + csdgroup: "{{ csdgroup }}" + create_parameters: + - name: CSD + register: result + failed_when: false + + - name: Assert program_2 created 2.12 + when: ansible_version.minor >= 12 + ansible.builtin.assert: + that: + - result is changed + - result.cpsm_response == 'OK' + - result.record_count == 1 + - result.records[0].name == program_2 + + - name: Assert program_2 created 2.11 + when: ansible_version.minor <= 11 + ansible.builtin.assert: + that: + - result.changed == false + - result.failed_when_result == false + - result.msg == error_msg + + - name: HTTP Check All Records Returned + delegate_to: localhost + cmci_get: + type: CICSDefinitionProgram + resources: + filter: + name: "{{ program_filter }}" + get_parameters: + - name: CSDGROUP + value: "{{ csdgroup }}" + register: result + failed_when: false + + - name: Assert record_count is 2 2.12 + when: ansible_version.minor >= 12 + ansible.builtin.assert: + that: + - result is not changed + - result.cpsm_response == 'OK' + - result.record_count == 2 + + - name: Assert record_count is 2 2.11 + when: ansible_version.minor <= 11 + ansible.builtin.assert: + that: + - result.changed == false + - result.failed_when_result == false + - result.msg == error_msg + + - name: HTTP Check record count attribute + delegate_to: localhost + cmci_get: + type: CICSDefinitionProgram + record_count: 1 + resources: + filter: + name: "{{ program_filter }}" + get_parameters: + - name: CSDGROUP + value: "{{ csdgroup }}" + register: result + failed_when: false + + - name: Assert record_count attribute 2.12 + when: ansible_version.minor >= 12 + ansible.builtin.assert: + that: + - result is not changed + - result.cpsm_response == 'OK' + - result.record_count == 2 + - result.records|length == 1 + + - name: Assert record_count attribute 2.11 + when: ansible_version.minor <= 11 + ansible.builtin.assert: + that: + - result.changed == false + - 
result.failed_when_result == false + - result.msg == error_msg + + - name: HTTP Delete progdef + delegate_to: localhost + cmci_delete: + type: CICSDefinitionProgram + resources: + filter: + NAME: "{{ program }}" + get_parameters: + - name: CSDGROUP + value: "{{ csdgroup }}" + register: result + failed_when: false + + - name: Assert 7 2.12 + when: ansible_version.minor >= 12 + ansible.builtin.assert: + that: + - result is changed + - result.cpsm_response == 'OK' + - result.record_count == 1 + - result.success_count == 1 + + - name: Assert 7 2.11 + when: ansible_version.minor <= 11 + ansible.builtin.assert: + that: + - result.changed == false + - result.failed_when_result == false + - result.msg == error_msg + + - name: HTTP Delete progdef2 + delegate_to: localhost + cmci_delete: + type: CICSDefinitionProgram + resources: + filter: + NAME: "{{ program_2 }}" + get_parameters: + - name: CSDGROUP + value: "{{ csdgroup }}" + register: result + failed_when: false + + - name: Assert program_2 deleted 2.12 + when: ansible_version.minor >= 12 + ansible.builtin.assert: + that: + - result is changed + - result.cpsm_response == 'OK' + - result.record_count == 1 + - result.success_count == 1 + + - name: Assert program_2 deleted 2.11 + when: ansible_version.minor <= 11 + ansible.builtin.assert: + that: + - result.changed == false + - result.failed_when_result == false + - result.msg == error_msg + + - name: HTTP fail_on_nodata default + delegate_to: localhost + cmci_get: + type: CICSDefinitionProgram + resources: + filter: + name: "{{ program_filter }}" + get_parameters: + - name: CSDGROUP + value: "{{ csdgroup }}" + register: result + failed_when: false + + - name: Assert fail_on_nodata attribute default 2.12 + when: ansible_version.minor >= 12 + ansible.builtin.assert: + that: + - result is not changed + - result.failed == false + - result.cpsm_response == 'NODATA' + - result.cpsm_response_code == 1027 + - result.record_count == 0 + - result.msg is defined + - 
result.failed_when_result is defined + + - name: Assert fail_on_nodata attribute default 2.11 + when: ansible_version.minor <= 11 + ansible.builtin.assert: + that: + - result.changed == false + - result.failed_when_result == false + - result.msg == error_msg + + - name: HTTP fail_on_nodata + delegate_to: localhost + cmci_get: + type: CICSDefinitionProgram + fail_on_nodata: false + resources: + filter: + name: "{{ program_filter }}" + get_parameters: + - name: CSDGROUP + value: "{{ csdgroup }}" + register: result + failed_when: false + + - name: Assert fail_on_nodata attribute false 2.12 + when: ansible_version.minor >= 12 + ansible.builtin.assert: + that: + - result is not changed + - result.failed == false + - result.cpsm_response == 'NODATA' + - result.cpsm_response_code == 1027 + - result.record_count == 0 + - result.msg is not defined + - result.failed_when_result == false + + - name: Assert fail_on_nodata attribute false 2.11 + when: ansible_version.minor <= 11 + ansible.builtin.assert: + that: + - result.changed == false + - result.failed_when_result == false + - result.msg == error_msg diff --git a/tests/integration/targets/cics_cmci/playbooks/cics_cmci_module_defaults.yml b/tests/integration/targets/cics_cmci/playbooks/cics_cmci_module_defaults_cmci_group.yml similarity index 99% rename from tests/integration/targets/cics_cmci/playbooks/cics_cmci_module_defaults.yml rename to tests/integration/targets/cics_cmci/playbooks/cics_cmci_module_defaults_cmci_group.yml index 8318f291..d626941a 100644 --- a/tests/integration/targets/cics_cmci/playbooks/cics_cmci_module_defaults.yml +++ b/tests/integration/targets/cics_cmci/playbooks/cics_cmci_module_defaults_cmci_group.yml @@ -1,4 +1,4 @@ -# (c) Copyright IBM Corp. 2020,2021 +# (c) Copyright IBM Corp. 
2020,2023 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) --- - name: CMCI Group Module_Default Integration Tests diff --git a/tests/integration/targets/cics_cmci/playbooks/cmci_bas_install.yml b/tests/integration/targets/cics_cmci/playbooks/cmci_bas_install.yml index 76ddb963..32d96e5d 100644 --- a/tests/integration/targets/cics_cmci/playbooks/cmci_bas_install.yml +++ b/tests/integration/targets/cics_cmci/playbooks/cmci_bas_install.yml @@ -1,4 +1,4 @@ -# (c) Copyright IBM Corp. 2021 +# (c) Copyright IBM Corp. 2021,2024 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) --- - name: CMCI BAS Install Integration Test diff --git a/tests/integration/targets/cics_cmci/playbooks/cmci_bas_install_error.yml b/tests/integration/targets/cics_cmci/playbooks/cmci_bas_install_error.yml index fce13205..6c53f478 100644 --- a/tests/integration/targets/cics_cmci/playbooks/cmci_bas_install_error.yml +++ b/tests/integration/targets/cics_cmci/playbooks/cmci_bas_install_error.yml @@ -1,4 +1,4 @@ -# (c) Copyright IBM Corp. 2021 +# (c) Copyright IBM Corp. 2021,2023 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) --- - name: CMCI BAS Install Error Integration Test diff --git a/tests/integration/targets/cics_cmci/playbooks/cmci_bas_link.yml b/tests/integration/targets/cics_cmci/playbooks/cmci_bas_link.yml index df26d194..376d7179 100644 --- a/tests/integration/targets/cics_cmci/playbooks/cmci_bas_link.yml +++ b/tests/integration/targets/cics_cmci/playbooks/cmci_bas_link.yml @@ -1,4 +1,4 @@ -# (c) Copyright IBM Corp. 2021 +# (c) Copyright IBM Corp. 
2021,2023 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) --- - name: CMCI BAS Link Integration Test diff --git a/tests/integration/targets/cics_cmci/playbooks/cmci_create_pipeline_failure.yml b/tests/integration/targets/cics_cmci/playbooks/cmci_create_pipeline_failure.yml index 2e27d4cd..cf17966a 100644 --- a/tests/integration/targets/cics_cmci/playbooks/cmci_create_pipeline_failure.yml +++ b/tests/integration/targets/cics_cmci/playbooks/cmci_create_pipeline_failure.yml @@ -1,4 +1,4 @@ -# (c) Copyright IBM Corp. 2021 +# (c) Copyright IBM Corp. 2021,2023 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) --- - name: CMCI Create Pipeline Failure Integration Test diff --git a/tests/integration/targets/cics_cmci/playbooks/cmci_incorrect_context.yml b/tests/integration/targets/cics_cmci/playbooks/cmci_incorrect_context.yml index 09d3d569..1de7cd9f 100644 --- a/tests/integration/targets/cics_cmci/playbooks/cmci_incorrect_context.yml +++ b/tests/integration/targets/cics_cmci/playbooks/cmci_incorrect_context.yml @@ -1,4 +1,4 @@ -# (c) Copyright IBM Corp. 2021 +# (c) Copyright IBM Corp. 2021,2023 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) --- - name: CMCI Incorrect Context Integration Test diff --git a/tests/integration/targets/cics_cmci/playbooks/cmci_incorrect_host.yml b/tests/integration/targets/cics_cmci/playbooks/cmci_incorrect_host.yml index ebf2c4b4..19e1ab58 100644 --- a/tests/integration/targets/cics_cmci/playbooks/cmci_incorrect_host.yml +++ b/tests/integration/targets/cics_cmci/playbooks/cmci_incorrect_host.yml @@ -1,4 +1,4 @@ -# (c) Copyright IBM Corp. 2021 +# (c) Copyright IBM Corp. 
2021,2023 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) --- - name: CMCI Incorrect Host Integration Test diff --git a/tests/integration/targets/cics_cmci/playbooks/cmci_incorrect_port.yml b/tests/integration/targets/cics_cmci/playbooks/cmci_incorrect_port.yml index 356d3ac4..99fae8eb 100644 --- a/tests/integration/targets/cics_cmci/playbooks/cmci_incorrect_port.yml +++ b/tests/integration/targets/cics_cmci/playbooks/cmci_incorrect_port.yml @@ -1,4 +1,4 @@ -# (c) Copyright IBM Corp. 2021 +# (c) Copyright IBM Corp. 2021,2023 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) --- - name: CMCI Incorrect Port Integration Test diff --git a/tests/integration/targets/cics_cmci/playbooks/cmci_incorrect_scheme.yml b/tests/integration/targets/cics_cmci/playbooks/cmci_incorrect_scheme.yml index e4437758..e3abd4e5 100644 --- a/tests/integration/targets/cics_cmci/playbooks/cmci_incorrect_scheme.yml +++ b/tests/integration/targets/cics_cmci/playbooks/cmci_incorrect_scheme.yml @@ -1,4 +1,4 @@ -# (c) Copyright IBM Corp. 2021 +# (c) Copyright IBM Corp. 2021,2023 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) --- - name: CMCI Incorrect Scheme Integration Test diff --git a/tests/integration/targets/cics_cmci/playbooks/cmci_incorrect_scope.yml b/tests/integration/targets/cics_cmci/playbooks/cmci_incorrect_scope.yml index 5cbdae17..f23fe021 100644 --- a/tests/integration/targets/cics_cmci/playbooks/cmci_incorrect_scope.yml +++ b/tests/integration/targets/cics_cmci/playbooks/cmci_incorrect_scope.yml @@ -1,4 +1,4 @@ -# (c) Copyright IBM Corp. 2021 +# (c) Copyright IBM Corp. 
2021,2023 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) --- - name: CMCI Incorrect Scope Integration Test diff --git a/tests/integration/targets/cics_cmci/playbooks/cmci_insecure_false.yml b/tests/integration/targets/cics_cmci/playbooks/cmci_insecure_false.yml index 18530f10..3a8d76e1 100644 --- a/tests/integration/targets/cics_cmci/playbooks/cmci_insecure_false.yml +++ b/tests/integration/targets/cics_cmci/playbooks/cmci_insecure_false.yml @@ -1,4 +1,4 @@ -# (c) Copyright IBM Corp. 2020,2021 +# (c) Copyright IBM Corp. 2020,2023 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) --- - name: CMCI Insecure False Integration Test diff --git a/tests/integration/targets/cics_cmci/playbooks/cmci_install_bundle_failure.yml b/tests/integration/targets/cics_cmci/playbooks/cmci_install_bundle_failure.yml index 7ecffa37..d2b181db 100644 --- a/tests/integration/targets/cics_cmci/playbooks/cmci_install_bundle_failure.yml +++ b/tests/integration/targets/cics_cmci/playbooks/cmci_install_bundle_failure.yml @@ -1,4 +1,4 @@ -# (c) Copyright IBM Corp. 2021 +# (c) Copyright IBM Corp. 2021,2023 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) --- - name: CMCI Install Bundle Failure Integration Test diff --git a/tests/integration/targets/cics_cmci/playbooks/cmci_invalid_credentials.yml b/tests/integration/targets/cics_cmci/playbooks/cmci_invalid_credentials.yml index 057e3f83..5cb104b0 100644 --- a/tests/integration/targets/cics_cmci/playbooks/cmci_invalid_credentials.yml +++ b/tests/integration/targets/cics_cmci/playbooks/cmci_invalid_credentials.yml @@ -1,4 +1,4 @@ -# (c) Copyright IBM Corp. 2021 +# (c) Copyright IBM Corp. 
2021,2023 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) --- - name: CMCI Invalid Credentials Integration Test diff --git a/tests/integration/targets/cics_cmci/runme.sh b/tests/integration/targets/cics_cmci/runme.sh index 262d7d39..f4b6dd85 100755 --- a/tests/integration/targets/cics_cmci/runme.sh +++ b/tests/integration/targets/cics_cmci/runme.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -# (c) Copyright IBM Corp. 2020,2021 +# (c) Copyright IBM Corp. 2020,2023 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) set -eux # This is important to ensure that return codes from failing tests are propagated @@ -19,4 +19,5 @@ ansible-playbook -e "@$VAR_PATH" playbooks/cmci_incorrect_scheme.yml ansible-playbook -e "@$VAR_PATH" playbooks/cmci_bas_link.yml ansible-playbook -e "@$VAR_PATH" playbooks/cmci_bas_install.yml ansible-playbook -e "@$VAR_PATH" playbooks/cmci_bas_install_error.yml -ansible-playbook -e "@$VAR_PATH" playbooks/cics_cmci_module_defaults.yml +ansible-playbook -e "@$VAR_PATH" playbooks/cics_cmci_module_defaults_cmci.yml +ansible-playbook -e "@$VAR_PATH" playbooks/cics_cmci_module_defaults_cmci_group.yml diff --git a/tests/integration/targets/cics_cmci_missing_requests_library/playbooks/cmci_missing_requests.yml b/tests/integration/targets/cics_cmci_missing_requests_library/playbooks/cmci_missing_requests.yml index 93f28403..1a64f6b6 100644 --- a/tests/integration/targets/cics_cmci_missing_requests_library/playbooks/cmci_missing_requests.yml +++ b/tests/integration/targets/cics_cmci_missing_requests_library/playbooks/cmci_missing_requests.yml @@ -1,4 +1,4 @@ -# (c) Copyright IBM Corp. 2021 +# (c) Copyright IBM Corp. 
2021,2023 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) --- - name: CMCI Missing Requests Integration Test diff --git a/tests/integration/targets/cics_cmci_missing_requests_library/runme.sh b/tests/integration/targets/cics_cmci_missing_requests_library/runme.sh index 6bdd14a1..6b2826bc 100755 --- a/tests/integration/targets/cics_cmci_missing_requests_library/runme.sh +++ b/tests/integration/targets/cics_cmci_missing_requests_library/runme.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -# (c) Copyright IBM Corp. 2021 +# (c) Copyright IBM Corp. 2021,2024 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) set -eux # This is important to ensure that return codes from failing tests are propagated if pip show requests 2>&1 | grep -q 'Package(s) not found'; then diff --git a/tests/integration/targets/cics_cmci_missing_xmltodict_library/playbooks/cmci_missing_xmltodict.yml b/tests/integration/targets/cics_cmci_missing_xmltodict_library/playbooks/cmci_missing_xmltodict.yml index a55eb93c..c61478e4 100644 --- a/tests/integration/targets/cics_cmci_missing_xmltodict_library/playbooks/cmci_missing_xmltodict.yml +++ b/tests/integration/targets/cics_cmci_missing_xmltodict_library/playbooks/cmci_missing_xmltodict.yml @@ -1,4 +1,4 @@ -# (c) Copyright IBM Corp. 2021 +# (c) Copyright IBM Corp. 2021,2023 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) --- - name: CMCI Missing XMLToDict Library Integration Test diff --git a/tests/integration/targets/cics_cmci_missing_xmltodict_library/runme.sh b/tests/integration/targets/cics_cmci_missing_xmltodict_library/runme.sh index e7c42f99..fae93081 100755 --- a/tests/integration/targets/cics_cmci_missing_xmltodict_library/runme.sh +++ b/tests/integration/targets/cics_cmci_missing_xmltodict_library/runme.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -# (c) Copyright IBM Corp. 2021 +# (c) Copyright IBM Corp. 
2021,2024 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) set -eux # This is important to ensure that return codes from failing tests are propagated if pip show xmltodict 2>&1 | grep -q 'Package(s) not found'; then diff --git a/tests/integration/targets/cics_csd/playbooks/csdup_script.yml b/tests/integration/targets/cics_csd/playbooks/csdup_script.yml new file mode 100644 index 00000000..3ed1808a --- /dev/null +++ b/tests/integration/targets/cics_csd/playbooks/csdup_script.yml @@ -0,0 +1,228 @@ +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) +--- +- name: Test CSDUP script for csd module + + hosts: "all" + gather_facts: false + environment: "{{ environment_vars }}" + vars: + data_set_name: "DFHCSD" + data_set_path: "{{ region_data_set_path }}.{{ data_set_name }}" + script_data_set_path: "{{ region_data_set_path }}.CSDUP" + script_uss_path: "{{ uss_path }}/script.csdup" + script_local_path: "{{ playbook_dir }}/script.csdup" + + module_defaults: + ibm.ibm_zos_cics.csd: + cics_data_sets: + template: "{{ cics_install_path }}.<< lib_name >>" + region_data_sets: + template: "{{ region_data_set_path }}.<< data_set_name >>" + + tasks: + # ############################################################################# + # ############################## Initial Cleanup ############################## + # ############################################################################# + + - name: Delete {{ data_set_name }} + ibm.ibm_zos_core.zos_data_set: + name: "{{ data_set_path }}" + state: absent + register: result + retries: 3 + until: result is not failed + + - name: Delete {{ script_data_set_path }} + ibm.ibm_zos_core.zos_data_set: + name: "{{ script_data_set_path }}" + state: absent + register: result + retries: 3 + until: result is not failed + + - name: Delete {{ script_uss_path }} + ansible.builtin.file: + path: "{{ script_uss_path }}" + state: absent + + # 
############################################################################# + # ############################## Module Testing ############################### + # ############################################################################# + + - name: Wrap test in block so cleanup always runs + block: + - name: Run csd module with initial state + ibm.ibm_zos_cics.csd: + state: "initial" + register: result + - name: Debug + ansible.builtin.debug: + msg: "{{ result }}" + + - name: Assert {{ data_set_name }} created + ansible.builtin.assert: + that: + - result.failed == false + - result.changed == true + - result.start_state.exists == false + - result.end_state.exists == true + - result.msg == "" + + # ############################################################################# + + - name: Create a data set with a script in it + ibm.ibm_zos_core.zos_copy: + content: "DEFINE PROGRAM(TESTPRG1) GROUP(TESTGRP1)" + dest: "{{ script_data_set_path }}" + dest_data_set: + type: "SEQ" + record_format: "FB" + register: result + retries: 3 + until: result is not failed + + - name: Run csd module with a DFHCSDUP script from a data set + ibm.ibm_zos_cics.csd: + state: "changed" + input_location: "DATA_SET" + input_src: "{{ script_data_set_path }}" + register: result + - name: Debug + ansible.builtin.debug: + msg: "{{ result }}" + + - name: Assert script was run + ansible.builtin.assert: + that: + - result.failed == false + - result.changed == true + - result.executions|select("search", "PROGRAM TESTPRG1 DEFINED IN GROUP TESTGRP1") + - result.msg == "" + - result.start_state.exists == true + - result.end_state.exists == true + + # ############################################################################# + + - name: Create a USS file with a script in it + ansible.builtin.shell: | + mkdir -p {{ uss_path }} + echo "DEFINE PROGRAM(TESTPRG2) GROUP(TESTGRP2)" > {{ script_uss_path }} + changed_when: true + + - name: Run csd module with a DFHCSDUP script from a USS file + 
ibm.ibm_zos_cics.csd: + state: "changed" + input_location: "USS" + input_src: "{{ script_uss_path }}" + register: result + - name: Debug + ansible.builtin.debug: + msg: "{{ result }}" + + - name: Assert script was run + ansible.builtin.assert: + that: + - result.failed == false + - result.changed == true + - result.executions|select("search", "PROGRAM TESTPRG2 DEFINED IN GROUP TESTGRP2") + - result.msg == "" + - result.start_state.exists == true + - result.end_state.exists == true + + # ############################################################################# + + - name: Run csd module with a DFHCSDUP script from a local file + ibm.ibm_zos_cics.csd: + state: "changed" + input_location: "LOCAL" + input_src: "{{ script_local_path }}" + register: result + - name: Debug + ansible.builtin.debug: + msg: "{{ result }}" + + - name: Assert script was run + ansible.builtin.assert: + that: + - result.failed == false + - result.changed == true + - result.executions|select("search", "PROGRAM TESTPRG3 DEFINED IN GROUP TESTGRP3") + - result.msg == "" + - result.start_state.exists == true + - result.end_state.exists == true + + # ############################################################################# + + - name: Run csd module with a DFHCSDUP script inline (single line) + ibm.ibm_zos_cics.csd: + state: "changed" + input_src: "{{ script_local_path }}" + input_location: "INLINE" + input_content: "DEFINE PROGRAM(TESTPRG4) GROUP(TESTGRP4)" + register: result + - name: Debug + ansible.builtin.debug: + msg: "{{ result }}" + + - name: Assert script was run + ansible.builtin.assert: + that: + - result.failed == false + - result.changed == true + - result.executions|select("search", "PROGRAM TESTPRG4 DEFINED IN GROUP TESTGRP4") + - result.msg == "" + - result.start_state.exists == true + - result.end_state.exists == true + + # ############################################################################# + + - name: Run csd module with a DFHCSDUP script inline (multiple lines) + 
ibm.ibm_zos_cics.csd: + state: "changed" + input_src: "{{ script_local_path }}" + input_location: "INLINE" + input_content: | + DEFINE PROGRAM(TESTPRG5) GROUP(TESTGRP5) + DEFINE PROGRAM(TESTPRG6) GROUP(TESTGRP6) + register: result + - name: Debug + ansible.builtin.debug: + msg: "{{ result }}" + + - name: Assert script was run + ansible.builtin.assert: + that: + - result.failed == false + - result.changed == true + - result.executions|select("search", "PROGRAM TESTPRG5 DEFINED IN GROUP TESTGRP5") + - result.executions|select("search", "PROGRAM TESTPRG6 DEFINED IN GROUP TESTGRP6") + - result.msg == "" + - result.start_state.exists == true + - result.end_state.exists == true + + # ############################################################################# + # ################################## Cleanup ################################## + # ############################################################################# + + always: + - name: Delete {{ data_set_name }} + ibm.ibm_zos_core.zos_data_set: + name: "{{ data_set_path }}" + state: absent + register: result + retries: 3 + until: result is changed + + - name: Delete {{ script_data_set_path }} + ibm.ibm_zos_core.zos_data_set: + name: "{{ script_data_set_path }}" + state: absent + register: result + retries: 3 + until: result is changed + + - name: Delete {{ script_uss_path }} + ansible.builtin.file: + path: "{{ script_uss_path }}" + state: absent diff --git a/tests/integration/targets/cics_csd/playbooks/script.csdup b/tests/integration/targets/cics_csd/playbooks/script.csdup new file mode 100644 index 00000000..32a4ae79 --- /dev/null +++ b/tests/integration/targets/cics_csd/playbooks/script.csdup @@ -0,0 +1 @@ +DEFINE PROGRAM(TESTPRG3) GROUP(TESTGRP3) \ No newline at end of file diff --git a/tests/integration/targets/cics_csd/runme.sh b/tests/integration/targets/cics_csd/runme.sh new file mode 100755 index 00000000..fd63921a --- /dev/null +++ b/tests/integration/targets/cics_csd/runme.sh @@ -0,0 +1,11 @@ 
+#!/usr/bin/env bash +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +set -eux + +VAR_PATH="$ANSIBLE_COLLECTIONS_PATH/ansible_collections/ibm/ibm_zos_cics/tests/integration/variables/provisioning.yml" +INV_PATH="$ANSIBLE_COLLECTIONS_PATH/ansible_collections/ibm/ibm_zos_cics/tests/integration/inventory_zos.yml" +ZOS_ENV="$ANSIBLE_COLLECTIONS_PATH/ansible_collections/ibm/ibm_zos_cics/tests/integration/variables/zos.yml" + +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" playbooks/csdup_script.yml diff --git a/tests/integration/targets/cics_data_set/playbooks/destination_template.yml b/tests/integration/targets/cics_data_set/playbooks/destination_template.yml new file mode 100644 index 00000000..ff88bc2e --- /dev/null +++ b/tests/integration/targets/cics_data_set/playbooks/destination_template.yml @@ -0,0 +1,177 @@ +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) +--- +- name: Test Destination for MODULE_NAME module + + hosts: "all" + gather_facts: false + environment: "{{ environment_vars }}" + vars: + data_set_path_A: "{{ region_data_set_path }}.{{ data_set_name_A }}" + data_set_path_B: "{{ region_data_set_path }}.{{ data_set_name_B }}" + + module_defaults: + ibm.ibm_zos_cics.MODULE_NAME: + cics_data_sets: + template: "{{ cics_install_path }}.<< lib_name >>" + region_data_sets: + template: "{{ region_data_set_path }}.<< data_set_name >>" + + tasks: + # ############################################################################# + # ############################## Initial Cleanup ############################## + # ############################################################################# + + - name: Delete {{ data_set_name_A }} + ibm.ibm_zos_core.zos_data_set: + name: "{{ data_set_path_A }}" + state: absent + register: result + retries: 3 + until: result is not failed + + - name: Delete {{ data_set_name_B }} + 
ibm.ibm_zos_core.zos_data_set: + name: "{{ data_set_path_B }}" + state: absent + register: result + retries: 3 + until: result is not failed + + # ############################################################################# + # ####################### Module Testing - Initial ############################ + # ############################################################################# + + - name: Wrap test in block so cleanup always runs + block: + - name: Run MODULE_NAME module with initial state and destination A + ibm.ibm_zos_cics.MODULE_NAME: + state: "initial" + destination: "A" + register: result + - name: Debug + ansible.builtin.debug: + msg: "{{ result }}" + + - name: Assert {{ data_set_name_A }} created (changed is true) + ansible.builtin.assert: + that: + - result.failed == false + - result.changed == true + - result.start_state.exists == false + - result.end_state.exists == true + - result.msg == "" + - "'executions' in result" + + # ############################################################################# + + - name: Run MODULE_NAME module with initial state and destination B + ibm.ibm_zos_cics.MODULE_NAME: + state: "initial" + destination: "B" + register: result + - name: Debug + ansible.builtin.debug: + msg: "{{ result }}" + + - name: Assert {{ data_set_name_B }} created (changed is true) + ansible.builtin.assert: + that: + - result.failed == false + - result.changed == true + - result.start_state.exists == false + - result.end_state.exists == true + - result.msg == "" + - "'executions' in result" + + # ############################################################################# + # ####################### Module Testing - Absent ############################# + # ############################################################################# + + - name: Run MODULE_NAME module with absent state and destination A + ibm.ibm_zos_cics.MODULE_NAME: + state: "absent" + destination: "A" + register: result + - name: Debug + ansible.builtin.debug: + msg: 
"{{ result }}" + + - name: Assert {{ data_set_name_A }} deleted (changed is true) + ansible.builtin.assert: + that: + - result.failed == false + - result.changed == true + - result.start_state.exists == true + - result.end_state.exists == false + - result.msg == "" + - "'executions' in result" + + # ############################################################################# + + - name: Run MODULE_NAME module with absent state and destination B + ibm.ibm_zos_cics.MODULE_NAME: + state: "absent" + destination: "B" + register: result + - name: Debug + ansible.builtin.debug: + msg: "{{ result }}" + + - name: Assert {{ data_set_name_B }} deleted (changed is true) + ansible.builtin.assert: + that: + - result.failed == false + - result.changed == true + - result.start_state.exists == true + - result.end_state.exists == false + - result.msg == "" + - "'executions' in result" + + # ############################################################################# + # ####################### Module Testing - Mismatching ######################## + # ############################################################################# + + - name: Mismatched destination + ibm.ibm_zos_cics.MODULE_NAME: + region_data_sets: + DATA_SET_NAME_LOWER_A: + dsn: "{{ data_set_path_A }}" + state: absent + destination: B + register: result + ignore_errors: true + - name: Debug + ansible.builtin.debug: + msg: "{{ result }}" + + - name: Assert Failure + ansible.builtin.assert: + that: + - result.failed is true + - result.changed is false + - result.msg == "No template or data set override found for DATA_SET_NAME_LOWER_B" + - "'executions' in result" + - "'start_state' in result" + - "'end_state' in result" + + # ############################################################################# + # ################################## Cleanup ################################## + # ############################################################################# + + always: + - name: Delete {{ data_set_name_A }} + 
ibm.ibm_zos_core.zos_data_set: + name: "{{ data_set_path_A }}" + state: absent + register: result + retries: 3 + until: result is not failed + + - name: Delete {{ data_set_name_B }} + ibm.ibm_zos_core.zos_data_set: + name: "{{ data_set_path_B }}" + state: absent + register: result + retries: 3 + until: result is not failed diff --git a/tests/integration/targets/cics_data_set/playbooks/initial_absent_template.yml b/tests/integration/targets/cics_data_set/playbooks/initial_absent_template.yml new file mode 100644 index 00000000..20155639 --- /dev/null +++ b/tests/integration/targets/cics_data_set/playbooks/initial_absent_template.yml @@ -0,0 +1,183 @@ +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) +--- +- name: Test Initial and Absent states for MODULE_NAME module + + hosts: "all" + gather_facts: false + environment: "{{ environment_vars }}" + vars: + data_set_path: "{{ region_data_set_path }}.{{ data_set_name }}" + # Default values for template, can be overridden by command line args + vsam: false + start: false + recreate: false + + module_defaults: + ibm.ibm_zos_cics.MODULE_NAME: + cics_data_sets: + template: "{{ cics_install_path }}.<< lib_name >>" + region_data_sets: + template: "{{ region_data_set_path }}.<< data_set_name >>" + + tasks: + # ############################################################################# + # ############################## Initial Cleanup ############################## + # ############################################################################# + + - name: Delete {{ data_set_name }} + ibm.ibm_zos_core.zos_data_set: + name: "{{ data_set_path }}" + state: absent + register: result + retries: 3 + until: result is not failed + + # ############################################################################# + # ####################### Module Testing - Initial ############################ + # ############################################################################# 
+ + - name: Wrap test in block so cleanup always runs + block: + - name: Run MODULE_NAME module with initial state + ibm.ibm_zos_cics.MODULE_NAME: + state: "initial" + register: result + - name: Debug + ansible.builtin.debug: + msg: "{{ result }}" + + - name: Assert {{ data_set_name }} created (changed is true) + ansible.builtin.assert: + that: + - result.failed == false + - result.changed == true + - result.start_state.exists == false + - result.end_state.exists == true + - result.msg == "" + - "'executions' in result" + + # Only relevant for VSAM data sets + - name: Assert {{ data_set_name }} VSAM state + when: vsam + ansible.builtin.assert: + that: + - result.start_state.data_set_organization == "NONE" + - result.end_state.data_set_organization == "VSAM" + + # Only relevant for data sets that specify a start type + - name: Assert {{ data_set_name }} start state + when: start + ansible.builtin.assert: + that: + - result.start_state.autostart_override == '' + - result.start_state.next_start == '' + - result.end_state.autostart_override == 'AUTOINIT' + - result.end_state.next_start == 'UNKNOWN' + + # ############################################################################# + + - name: Run MODULE_NAME module with initial state again + ibm.ibm_zos_cics.MODULE_NAME: + state: "initial" + register: result + - name: Debug + ansible.builtin.debug: + msg: "{{ result }}" + + # Some data sets will do nothing if initial is run a second time + - name: Assert {{ data_set_name }} not created again (changed is false) + when: not recreate + ansible.builtin.assert: + that: + - result.failed == false + - result.changed == false + - result.start_state.exists == true + - result.end_state.exists == true + - result.msg == "" + - "'executions' in result" + + # Some data sets will delete all records if initial is run a second time. 
+ - name: Assert {{ data_set_name }} created again (changed is true) + when: recreate + ansible.builtin.assert: + that: + - result.failed == false + - result.changed == true + - result.start_state.exists == true + - result.end_state.exists == true + - result.msg == "" + - "'executions' in result" + + # ############################################################################# + # ####################### Module Testing - Absent ############################# + # ############################################################################# + + - name: Run MODULE_NAME module with absent state + ibm.ibm_zos_cics.MODULE_NAME: + state: "absent" + register: result + - name: Debug + ansible.builtin.debug: + msg: "{{ result }}" + + - name: Assert {{ data_set_name }} deleted (changed is true) + ansible.builtin.assert: + that: + - result.failed == false + - result.changed == true + - result.start_state.exists == true + - result.end_state.exists == false + - result.msg == "" + - "'executions' in result" + + # Only relevant for VSAM data sets + - name: Assert {{ data_set_name }} VSAM state + when: vsam + ansible.builtin.assert: + that: + - result.start_state.data_set_organization == "VSAM" + - result.end_state.data_set_organization == "NONE" + + # Only relevant for data sets that specify a start type + - name: Assert {{ data_set_name }} start state + when: start + ansible.builtin.assert: + that: + - result.start_state.autostart_override == 'AUTOINIT' + - result.start_state.next_start == 'UNKNOWN' + - result.end_state.autostart_override == '' + - result.end_state.next_start == '' + + # ############################################################################# + + - name: Run MODULE_NAME module with absent state again + ibm.ibm_zos_cics.MODULE_NAME: + state: "absent" + register: result + - name: Debug + ansible.builtin.debug: + msg: "{{ result }}" + + - name: Assert {{ data_set_name }} not deleted again (changed is false) + ansible.builtin.assert: + that: + - result.failed 
== false + - result.changed == false + - result.start_state.exists == false + - result.end_state.exists == false + - result.msg == "" + - "'executions' in result" + + # ############################################################################# + # ################################## Cleanup ################################## + # ############################################################################# + + always: + - name: Delete {{ data_set_name }} + ibm.ibm_zos_core.zos_data_set: + name: "{{ data_set_path }}" + state: absent + register: result + retries: 3 + until: result is not failed \ No newline at end of file diff --git a/tests/integration/targets/cics_data_set/playbooks/region_group_template.yml b/tests/integration/targets/cics_data_set/playbooks/region_group_template.yml new file mode 100644 index 00000000..90cec7e7 --- /dev/null +++ b/tests/integration/targets/cics_data_set/playbooks/region_group_template.yml @@ -0,0 +1,91 @@ +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) +--- +- name: Test region module defaults for MODULE_NAME module + + hosts: "all" + gather_facts: false + environment: "{{ environment_vars }}" + vars: + data_set_path: "{{ region_data_set_path }}.{{ data_set_name }}" + + module_defaults: + group/ibm.ibm_zos_cics.region: + cics_data_sets: + template: "{{ cics_install_path }}.<< lib_name >>" + sdfhload: "{{ cics_install_path }}.SDFHLOAD" + region_data_sets: + template: "{{ region_data_set_path }}.<< data_set_name >>" + dfhtemp: + dsn: "{{ region_data_set_path }}.DFHTEMP" + dfhcsd: + dsn: "{{ region_data_set_path }}.DFHCSD" + dfhgcd: + dsn: "{{ region_data_set_path }}.DFHGCD" + dfhintra: + dsn: "{{ region_data_set_path }}.DFHINTRA" + dfhlcd: + dsn: "{{ region_data_set_path }}.DFHLCD" + dfhlrq: + dsn: "{{ region_data_set_path }}.DFHLRQ" + dfhauxt: + dsn: "{{ region_data_set_path }}.DFHAUXT" + dfhdmpa: + dsn: "{{ region_data_set_path }}.DFHDMPA" + state: "initial" + 
space_primary: 10 + space_secondary: 3 + space_type: "M" + volumes: "VOL001" + + tasks: + - name: Skip whole test if Ansible version too low + when: ansible_version.minor > 11 + block: + # ############################################################################# + # ############################## Initial Cleanup ############################## + # ############################################################################# + + - name: Delete {{ data_set_name }} + ibm.ibm_zos_core.zos_data_set: + name: "{{ data_set_path }}" + state: absent + register: result + retries: 3 + until: result is not failed + + # ############################################################################# + # ############################## Module Testing ############################### + # ############################################################################# + + - name: Wrap test in block so cleanup always runs + block: + - name: Run MODULE_NAME module with initial state + ibm.ibm_zos_cics.MODULE_NAME: + register: result + - name: Debug + ansible.builtin.debug: + msg: "{{ result }}" + + - name: Assert {{ data_set_name }} created + ansible.builtin.assert: + that: + - result.failed == false + - result.changed == true + - result.start_state.exists == false + - result.end_state.exists == true + - result.msg == "" + - "'executions' in result" + + # ############################################################################# + # ################################## Cleanup ################################## + # ############################################################################# + + always: + - name: Delete {{ data_set_name }} + ibm.ibm_zos_core.zos_data_set: + name: "{{ data_set_path }}" + state: absent + register: result + retries: 3 + until: result is changed \ No newline at end of file diff --git a/tests/integration/targets/cics_data_set/playbooks/template_override_template.yml b/tests/integration/targets/cics_data_set/playbooks/template_override_template.yml new file 
mode 100644 index 00000000..55ad7362 --- /dev/null +++ b/tests/integration/targets/cics_data_set/playbooks/template_override_template.yml @@ -0,0 +1,140 @@ +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) +--- +- name: Test templating and overriding data set names for MODULE_NAME module + + hosts: "all" + gather_facts: false + environment: "{{ environment_vars }}" + vars: + data_set_path: "{{ region_data_set_path }}.{{ data_set_name }}" + + tasks: + # ############################################################################# + # ############################## Initial Cleanup ############################## + # ############################################################################# + + - name: Delete {{ data_set_name }} + ibm.ibm_zos_core.zos_data_set: + name: "{{ data_set_path }}" + state: absent + register: result + retries: 3 + until: result is not failed + + # ############################################################################# + # ############################## Module Testing ############################### + # ############################################################################# + + - name: Wrap test in block so cleanup always runs + block: + - name: Run MODULE_NAME module with template + ibm.ibm_zos_cics.MODULE_NAME: + cics_data_sets: + template: "{{ cics_install_path }}.<< lib_name >>" + region_data_sets: + template: "{{ region_data_set_path }}.<< data_set_name >>" + state: "initial" + register: result + - name: Debug + ansible.builtin.debug: + msg: "{{ result }}" + + - name: Assert {{ data_set_name }} created (using templated dsn) + ansible.builtin.assert: + that: + - result.failed == false + - result.changed == true + - result.start_state.exists == false + - result.end_state.exists == true + - result.msg == "" + - result.executions|select("search", "{{ region_data_set_path }}.{{ data_set_name }}") + + - name: Delete {{ data_set_name }} + ibm.ibm_zos_core.zos_data_set: + 
name: "{{ data_set_path }}" + state: absent + register: result + retries: 3 + until: result is changed + + # ############################################################################# + + - name: Run MODULE_NAME module with override + ibm.ibm_zos_cics.MODULE_NAME: + cics_data_sets: + sdfhload: "{{ cics_install_path }}.SDFHLOAD" + region_data_sets: + DATA_SET_NAME_LOWER: + dsn: "{{ data_set_path }}" + state: "initial" + register: result + - name: Debug + ansible.builtin.debug: + msg: "{{ result }}" + + - name: Assert {{ data_set_name }} created (using dsn override) + ansible.builtin.assert: + that: + - result.failed == false + - result.changed == true + - result.start_state.exists == false + - result.end_state.exists == true + - result.msg == "" + - result.executions|select("search", "{{ data_set_path }}") + + - name: Delete {{ data_set_name }} + ibm.ibm_zos_core.zos_data_set: + name: "{{ data_set_path }}" + state: absent + register: result + retries: 3 + until: result is changed + + # ############################################################################# + + - name: Run MODULE_NAME module with both template and override (override takes precedence) + ibm.ibm_zos_cics.MODULE_NAME: + cics_data_sets: + template: "This value should be ignored, and would cause an error if used" + sdfhload: "{{ cics_install_path }}.SDFHLOAD" + region_data_sets: + template: "This value should be ignored, and would cause an error if used" + DATA_SET_NAME_LOWER: + dsn: "{{ data_set_path }}" + state: "initial" + register: result + - name: Debug + ansible.builtin.debug: + msg: "{{ result }}" + + - name: Assert {{ data_set_name }} created (using overridden dsn) + ansible.builtin.assert: + that: + - result.failed == false + - result.changed == true + - result.start_state.exists == false + - result.end_state.exists == true + - result.msg == "" + - result.executions|select("search", "{{ data_set_path }}") + + - name: Delete {{ data_set_name }} + ibm.ibm_zos_core.zos_data_set: + name: "{{ 
data_set_path }}" + state: absent + register: result + retries: 3 + until: result is changed + + # ############################################################################# + # ################################## Cleanup ################################## + # ############################################################################# + + always: + - name: Delete {{ data_set_name }} + ibm.ibm_zos_core.zos_data_set: + name: "{{ data_set_path }}" + state: absent + register: result + retries: 3 + until: result is not failed \ No newline at end of file diff --git a/tests/integration/targets/cics_data_set/playbooks/validation_template.yml b/tests/integration/targets/cics_data_set/playbooks/validation_template.yml new file mode 100644 index 00000000..2a678cd9 --- /dev/null +++ b/tests/integration/targets/cics_data_set/playbooks/validation_template.yml @@ -0,0 +1,136 @@ +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) +--- +- name: Test validation errors for MODULE_NAME module + + hosts: "all" + gather_facts: false + environment: "{{ environment_vars }}" + vars: + data_set_path: "{{ region_data_set_path }}.{{ data_set_name }}" + + module_defaults: + ibm.ibm_zos_cics.MODULE_NAME: + cics_data_sets: + template: "{{ cics_install_path }}.<< lib_name >>" + + tasks: + # ############################################################################# + # ############################## Initial Cleanup ############################## + # ############################################################################# + + - name: Delete upper case {{ data_set_name }} + ibm.ibm_zos_core.zos_data_set: + name: "{{ data_set_path | upper }}" + state: absent + register: result + retries: 3 + until: result is not failed + + # ############################################################################# + # ############################## Module Testing ############################### + # 
############################################################################# + + - name: Wrap test in block so cleanup always runs + block: + - name: Missing region_data_sets + ibm.ibm_zos_cics.MODULE_NAME: + state: absent + register: result + ignore_errors: true + - name: Debug + ansible.builtin.debug: + msg: "{{ result }}" + + - name: Assert Failure + ansible.builtin.assert: + that: + - result.failed == true + - result.changed == false + - result.msg == "Required argument region_data_sets not found" + - "'executions' in result" + - "'start_state' in result" + - "'end_state' in result" + + # ############################################################################# + + - name: Missing template and dsn + ibm.ibm_zos_cics.MODULE_NAME: + region_data_sets: + template: + DATA_SET_NAME_LOWER: + dsn: + state: absent + register: result + ignore_errors: true + - name: Debug + ansible.builtin.debug: + msg: "{{ result }}" + + - name: Assert Failure + ansible.builtin.assert: + that: + - result.failed == true + - result.changed == false + - result.msg == "No template or data set override found for {{ data_set_name | lower }}" + - "'executions' in result" + - "'start_state' in result" + - "'end_state' in result" + + # ############################################################################# + + - name: Bad dsn + ibm.ibm_zos_cics.MODULE_NAME: + region_data_sets: + template: "{{ region_data_set_path }}..." + state: absent + register: result + ignore_errors: true + - name: Debug + ansible.builtin.debug: + msg: "{{ result }}" + + - name: Assert Failure + ansible.builtin.assert: + that: + - result.failed == true + - result.changed == false + - "'Invalid argument \"{{ region_data_set_path }}...\" for type \"data_set_base\".' 
in result.msg" + - "'executions' in result" + - "'start_state' in result" + - "'end_state' in result" + + # ############################################################################# + + - name: Lowercase dsn + ibm.ibm_zos_cics.MODULE_NAME: + region_data_sets: + template: "{{ data_set_path | lower }}" + state: "initial" + register: result + - name: Debug + ansible.builtin.debug: + msg: "{{ result }}" + + - name: Assert {{ data_set_name }} created + ansible.builtin.assert: + that: + - result.failed == false + - result.changed == true + - result.start_state.exists == false + - result.end_state.exists == true + - result.msg == "" + - "'executions' in result" + + # ############################################################################# + # ################################## Cleanup ################################## + # ############################################################################# + + always: + - name: Delete upper case {{ data_set_name }} + ibm.ibm_zos_core.zos_data_set: + name: "{{ data_set_path | upper }}" + state: absent + register: result + retries: 3 + until: result is changed \ No newline at end of file diff --git a/tests/integration/targets/cics_data_set/runme.sh b/tests/integration/targets/cics_data_set/runme.sh new file mode 100755 index 00000000..af30461b --- /dev/null +++ b/tests/integration/targets/cics_data_set/runme.sh @@ -0,0 +1,95 @@ +#!/usr/bin/env bash +# (c) Copyright IBM Corp. 
2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +set -eux + +VAR_PATH="$ANSIBLE_COLLECTIONS_PATH/ansible_collections/ibm/ibm_zos_cics/tests/integration/variables/provisioning.yml" +INV_PATH="$ANSIBLE_COLLECTIONS_PATH/ansible_collections/ibm/ibm_zos_cics/tests/integration/inventory_zos.yml" +ZOS_ENV="$ANSIBLE_COLLECTIONS_PATH/ansible_collections/ibm/ibm_zos_cics/tests/integration/variables/zos.yml" + +# These tests use a base "template" test that is run for each data set in turn, with parameters passed in +# to tailor the test. Where possible, parameters are passed in on the ansible-playbook command, but there +# are some things in ansible playbooks that can't have variables in (such module names). For these, the sed +# command is instead used to do a find and replace on the template file to insert the required value. + +sed -e "s/MODULE_NAME/aux_temp_storage/g" playbooks/initial_absent_template.yml > playbooks/initial_absent_aux_temp_storage.yml +sed -e "s/MODULE_NAME/csd/g" playbooks/initial_absent_template.yml > playbooks/initial_absent_csd.yml +sed -e "s/MODULE_NAME/global_catalog/g" playbooks/initial_absent_template.yml > playbooks/initial_absent_global_catalog.yml +sed -e "s/MODULE_NAME/td_intrapartition/g" playbooks/initial_absent_template.yml > playbooks/initial_absent_td_intrapartition.yml +sed -e "s/MODULE_NAME/local_catalog/g" playbooks/initial_absent_template.yml > playbooks/initial_absent_local_catalog.yml +sed -e "s/MODULE_NAME/local_request_queue/g" playbooks/initial_absent_template.yml > playbooks/initial_absent_local_request_queue.yml +sed -e "s/MODULE_NAME/aux_trace/g" playbooks/initial_absent_template.yml > playbooks/initial_absent_aux_trace.yml +sed -e "s/MODULE_NAME/transaction_dump/g" playbooks/initial_absent_template.yml > playbooks/initial_absent_transaction_dump.yml + +sed -e "s/MODULE_NAME/aux_temp_storage/g" -e "s/DATA_SET_NAME_LOWER/dfhtemp/g" playbooks/validation_template.yml > 
playbooks/validation_aux_temp_storage.yml +sed -e "s/MODULE_NAME/csd/g" -e "s/DATA_SET_NAME_LOWER/dfhcsd/g" playbooks/validation_template.yml > playbooks/validation_csd.yml +sed -e "s/MODULE_NAME/global_catalog/g" -e "s/DATA_SET_NAME_LOWER/dfhgcd/g" playbooks/validation_template.yml > playbooks/validation_global_catalog.yml +sed -e "s/MODULE_NAME/td_intrapartition/g" -e "s/DATA_SET_NAME_LOWER/dfhintra/g" playbooks/validation_template.yml > playbooks/validation_td_intrapartition.yml +sed -e "s/MODULE_NAME/local_catalog/g" -e "s/DATA_SET_NAME_LOWER/dfhlcd/g" playbooks/validation_template.yml > playbooks/validation_local_catalog.yml +sed -e "s/MODULE_NAME/local_request_queue/g" -e "s/DATA_SET_NAME_LOWER/dfhlrq/g" playbooks/validation_template.yml > playbooks/validation_local_request_queue.yml +sed -e "s/MODULE_NAME/aux_trace/g" -e "s/DATA_SET_NAME_LOWER/dfhauxt/g" playbooks/validation_template.yml > playbooks/validation_aux_trace.yml +sed -e "s/MODULE_NAME/transaction_dump/g" -e "s/DATA_SET_NAME_LOWER/dfhdmpa/g" playbooks/validation_template.yml > playbooks/validation_transaction_dump.yml + +sed -e "s/MODULE_NAME/aux_trace/g" -e "s/DATA_SET_NAME_LOWER_A/dfhauxt/g" -e "s/DATA_SET_NAME_LOWER_B/dfhbuxt/g" playbooks/destination_template.yml > playbooks/destination_aux_trace.yml +sed -e "s/MODULE_NAME/transaction_dump/g" -e "s/DATA_SET_NAME_LOWER_A/dfhdmpa/g" -e "s/DATA_SET_NAME_LOWER_B/dfhdmpb/g" playbooks/destination_template.yml > playbooks/destination_transaction_dump.yml + +sed -e "s/MODULE_NAME/aux_temp_storage/g" playbooks/region_group_template.yml > playbooks/region_group_aux_temp_storage.yml +sed -e "s/MODULE_NAME/csd/g" playbooks/region_group_template.yml > playbooks/region_group_csd.yml +sed -e "s/MODULE_NAME/global_catalog/g" playbooks/region_group_template.yml > playbooks/region_group_global_catalog.yml +sed -e "s/MODULE_NAME/td_intrapartition/g" playbooks/region_group_template.yml > playbooks/region_group_td_intrapartition.yml +sed -e 
"s/MODULE_NAME/local_catalog/g" playbooks/region_group_template.yml > playbooks/region_group_local_catalog.yml +sed -e "s/MODULE_NAME/local_request_queue/g" playbooks/region_group_template.yml > playbooks/region_group_local_request_queue.yml +sed -e "s/MODULE_NAME/aux_trace/g" playbooks/region_group_template.yml > playbooks/region_group_aux_trace.yml +sed -e "s/MODULE_NAME/transaction_dump/g" playbooks/region_group_template.yml > playbooks/region_group_transaction_dump.yml + +sed -e "s/MODULE_NAME/aux_temp_storage/g" -e "s/DATA_SET_NAME_LOWER/dfhtemp/g" playbooks/template_override_template.yml > playbooks/template_override_aux_temp_storage.yml +sed -e "s/MODULE_NAME/csd/g" -e "s/DATA_SET_NAME_LOWER/dfhcsd/g" playbooks/template_override_template.yml > playbooks/template_override_csd.yml +sed -e "s/MODULE_NAME/global_catalog/g" -e "s/DATA_SET_NAME_LOWER/dfhgcd/g" playbooks/template_override_template.yml > playbooks/template_override_global_catalog.yml +sed -e "s/MODULE_NAME/td_intrapartition/g" -e "s/DATA_SET_NAME_LOWER/dfhintra/g" playbooks/template_override_template.yml > playbooks/template_override_td_intrapartition.yml +sed -e "s/MODULE_NAME/local_catalog/g" -e "s/DATA_SET_NAME_LOWER/dfhlcd/g" playbooks/template_override_template.yml > playbooks/template_override_local_catalog.yml +sed -e "s/MODULE_NAME/local_request_queue/g" -e "s/DATA_SET_NAME_LOWER/dfhlrq/g" playbooks/template_override_template.yml > playbooks/template_override_local_request_queue.yml +sed -e "s/MODULE_NAME/aux_trace/g" -e "s/DATA_SET_NAME_LOWER/dfhauxt/g" playbooks/template_override_template.yml > playbooks/template_override_aux_trace.yml +sed -e "s/MODULE_NAME/transaction_dump/g" -e "s/DATA_SET_NAME_LOWER/dfhdmpa/g" playbooks/template_override_template.yml > playbooks/template_override_transaction_dump.yml + +# For debug, uncomment this to save the generated playbooks to the output directory if you want to see the effect of the sed commands +# cp -r playbooks 
$ANSIBLE_COLLECTIONS_PATH/ansible_collections/ibm/ibm_zos_cics/tests/output/templates + +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" -e "{data_set_name: DFHTEMP, vsam: true}" playbooks/initial_absent_aux_temp_storage.yml +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" -e "{data_set_name: DFHCSD, vsam: true, recreate: true}" playbooks/initial_absent_csd.yml +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" -e "{data_set_name: DFHGCD, vsam: true, start: true}" playbooks/initial_absent_global_catalog.yml +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" -e "{data_set_name: DFHINTRA, vsam: true}" playbooks/initial_absent_td_intrapartition.yml +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" -e "{data_set_name: DFHLCD, vsam: true, recreate: true}" playbooks/initial_absent_local_catalog.yml +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" -e "{data_set_name: DFHLRQ, vsam: true}" playbooks/initial_absent_local_request_queue.yml +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" -e "{data_set_name: DFHAUXT}" playbooks/initial_absent_aux_trace.yml +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" -e "{data_set_name: DFHDMPA}" playbooks/initial_absent_transaction_dump.yml + +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" -e "{data_set_name: DFHTEMP}" playbooks/validation_aux_temp_storage.yml +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" -e "{data_set_name: DFHCSD}" playbooks/validation_csd.yml +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" -e "{data_set_name: DFHGCD}" playbooks/validation_global_catalog.yml +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" -e "{data_set_name: DFHINTRA}" playbooks/validation_td_intrapartition.yml +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" -e "{data_set_name: DFHLCD}" playbooks/validation_local_catalog.yml +ansible-playbook -i "$INV_PATH" -e 
"@$VAR_PATH" -e "@$ZOS_ENV" -e "{data_set_name: DFHLRQ}" playbooks/validation_local_request_queue.yml +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" -e "{data_set_name: DFHAUXT}" playbooks/validation_aux_trace.yml +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" -e "{data_set_name: DFHDMPA}" playbooks/validation_transaction_dump.yml + +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" -e "{data_set_name_A: DFHAUXT, data_set_name_B: DFHBUXT}" playbooks/destination_aux_trace.yml +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" -e "{data_set_name_A: DFHDMPA, data_set_name_B: DFHDMPB}" playbooks/destination_transaction_dump.yml + +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" -e "{data_set_name: DFHTEMP}" playbooks/region_group_aux_temp_storage.yml +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" -e "{data_set_name: DFHCSD}" playbooks/region_group_csd.yml +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" -e "{data_set_name: DFHGCD}" playbooks/region_group_global_catalog.yml +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" -e "{data_set_name: DFHINTRA}" playbooks/region_group_td_intrapartition.yml +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" -e "{data_set_name: DFHLCD}" playbooks/region_group_local_catalog.yml +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" -e "{data_set_name: DFHLRQ}" playbooks/region_group_local_request_queue.yml +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" -e "{data_set_name: DFHAUXT}" playbooks/region_group_aux_trace.yml +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" -e "{data_set_name: DFHDMPA}" playbooks/region_group_transaction_dump.yml + +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" -e "{data_set_name: DFHTEMP}" playbooks/template_override_aux_temp_storage.yml +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" -e 
"{data_set_name: DFHCSD}" playbooks/template_override_csd.yml +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" -e "{data_set_name: DFHGCD}" playbooks/template_override_global_catalog.yml +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" -e "{data_set_name: DFHINTRA}" playbooks/template_override_td_intrapartition.yml +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" -e "{data_set_name: DFHLCD}" playbooks/template_override_local_catalog.yml +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" -e "{data_set_name: DFHLRQ}" playbooks/template_override_local_request_queue.yml +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" -e "{data_set_name: DFHAUXT}" playbooks/template_override_aux_trace.yml +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" -e "{data_set_name: DFHDMPA}" playbooks/template_override_transaction_dump.yml diff --git a/tests/integration/targets/cics_missing_zos_core/playbooks/missing_core.yml b/tests/integration/targets/cics_missing_zos_core/playbooks/missing_core.yml new file mode 100644 index 00000000..ee18c471 --- /dev/null +++ b/tests/integration/targets/cics_missing_zos_core/playbooks/missing_core.yml @@ -0,0 +1,108 @@ +# (c) Copyright IBM Corp. 
2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) +--- +- name: Missing zos core + + hosts: "all" + gather_facts: false + environment: "{{ environment_vars }}" + + tasks: + # ############################################################################# + # ############################## Module Testing ############################### + # ############################################################################# + + - name: Create global catalog data set + ibm.ibm_zos_cics.global_catalog: + state: "initial" + region_data_sets: + template: "{{ region_data_set_path }}.<< data_set_name >>" + cics_data_sets: + template: "{{ cics_install_path }}.<< lib_name >>" + register: result + ignore_errors: true + - name: Debug + ansible.builtin.debug: + var: result + + - name: Assert global failed + ansible.builtin.assert: + that: + - result.failed is true + - "'unable to locate collection ibm.ibm_zos_core' in result.msg" + + - name: Create local catalog data set + ibm.ibm_zos_cics.local_catalog: + state: "initial" + region_data_sets: + template: "{{ region_data_set_path }}.<< data_set_name >>" + cics_data_sets: + template: "{{ cics_install_path }}.<< lib_name >>" + register: result + ignore_errors: true + - name: Debug + ansible.builtin.debug: + var: result + + - name: Assert local failed + ansible.builtin.assert: + that: + - result.failed is true + - "'unable to locate collection ibm.ibm_zos_core' in result.msg" + + - name: Create lrq data set + ibm.ibm_zos_cics.local_request_queue: + state: "initial" + region_data_sets: + template: "{{ region_data_set_path }}.<< data_set_name >>" + cics_data_sets: + template: "{{ cics_install_path }}.<< lib_name >>" + register: result + ignore_errors: true + - name: Debug + ansible.builtin.debug: + var: result + + - name: Assert lrq failed + ansible.builtin.assert: + that: + - result.failed is true + - "'unable to locate collection ibm.ibm_zos_core' in result.msg" + + - name: Create TD Intrapartition 
data set + ibm.ibm_zos_cics.td_intrapartition: + state: "initial" + region_data_sets: + template: "{{ region_data_set_path }}.<< data_set_name >>" + cics_data_sets: + template: "{{ cics_install_path }}.<< lib_name >>" + register: result + ignore_errors: true + - name: Debug + ansible.builtin.debug: + var: result + + - name: Assert intra failed + ansible.builtin.assert: + that: + - result.failed is true + - "'unable to locate collection ibm.ibm_zos_core' in result.msg" + + - name: Create Auxiliary Temp data set + ibm.ibm_zos_cics.aux_temp_storage: + state: "initial" + region_data_sets: + template: "{{ region_data_set_path }}.<< data_set_name >>" + cics_data_sets: + template: "{{ cics_install_path }}.<< lib_name >>" + register: result + ignore_errors: true + - name: Debug + ansible.builtin.debug: + var: result + + - name: Assert aux temp failed + ansible.builtin.assert: + that: + - result.failed is true + - "'unable to locate collection ibm.ibm_zos_core' in result.msg" diff --git a/tests/integration/targets/cics_missing_zos_core/runme.sh b/tests/integration/targets/cics_missing_zos_core/runme.sh new file mode 100755 index 00000000..296aa97a --- /dev/null +++ b/tests/integration/targets/cics_missing_zos_core/runme.sh @@ -0,0 +1,14 @@ +#!/usr/bin/env bash +# (c) Copyright IBM Corp. 
2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +set -eux + +VAR_PATH="$ANSIBLE_COLLECTIONS_PATH/ansible_collections/ibm/ibm_zos_cics/tests/integration/variables/provisioning.yml" +INV_PATH="$ANSIBLE_COLLECTIONS_PATH/ansible_collections/ibm/ibm_zos_cics/tests/integration/inventory_zos.yml" +ZOS_ENV="$ANSIBLE_COLLECTIONS_PATH/ansible_collections/ibm/ibm_zos_cics/tests/integration/variables/zos.yml" + + +if [ -z "$(ansible-galaxy collection list ibm.ibm_zos_core)" ]; then +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" playbooks/missing_core.yml +fi \ No newline at end of file diff --git a/tests/integration/targets/cics_region_jcl/playbooks/region_jcl_member.yml b/tests/integration/targets/cics_region_jcl/playbooks/region_jcl_member.yml new file mode 100644 index 00000000..d633696c --- /dev/null +++ b/tests/integration/targets/cics_region_jcl/playbooks/region_jcl_member.yml @@ -0,0 +1,285 @@ +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) +--- +- name: Test lifecycle of Member data set for region_jcl module + + hosts: "all" + gather_facts: false + environment: "{{ environment_vars }}" + vars: + base_data_set_name: START + member_name: STRTMEMB + data_set_path: "{{ region_data_set_path }}.{{ base_data_set_name }}({{ member_name }})" + base_data_set_path: "{{ region_data_set_path }}.{{ base_data_set_name }}" + + module_defaults: + ibm.ibm_zos_cics.region_jcl: + region_data_sets: + template: "{{ region_data_set_path }}.<< data_set_name >>" + dfhstart: + dsn: "{{ data_set_path }}" + cics_data_sets: + template: "{{ cics_install_path }}.<< lib_name >>" + sdfhlic: "{{ cics_install_path }}.LIC.SDFHLIC" + le_data_sets: + template: "{{ le_path }}.<< lib_name >>" + applid: "{{ start_region_applid }}" + + tasks: + # ############################################################################# + # ############################## Initial Cleanup 
############################## + # ############################################################################# + + - name: Delete base data set {{ base_data_set_path }} + ibm.ibm_zos_core.zos_data_set: + name: "{{ base_data_set_path }}" + state: absent + register: result + retries: 3 + until: result is not failed + + # ############################################################################# + # ############################## Module Testing ############################### + # ############################################################################# + + - name: Wrap test in block so cleanup always runs + block: + - name: Run region_jcl module with initial state for non-existing PDS + ibm.ibm_zos_cics.region_jcl: + state: initial + ignore_errors: true + register: result + + - name: Debug + ansible.builtin.debug: + msg: "{{ result }}" + + - name: Assert initial state failed as {{ base_data_set_path }} does not exist + ansible.builtin.assert: + that: + - result.failed == true + - result.changed == false + - result.start_state.exists == false + - result.end_state.exists == false + - result.msg == "Base data set {{ base_data_set_path }} does not exist. 
Can only create a member in an existing PDS/E" + + # ############################################################################# + + - name: Create base data set {{ base_data_set_path }} + ibm.ibm_zos_core.zos_data_set: + name: "{{ base_data_set_path }}" + state: present + type: PDS + register: result + retries: 3 + until: result is not failed + + - name: Run region_jcl module with initial state for existing PDS + ibm.ibm_zos_cics.region_jcl: + state: initial + register: result + + - name: Debug + ansible.builtin.debug: + msg: "{{ result }}" + + - name: Assert {{ data_set_path }} created + ansible.builtin.assert: + that: + - result.failed == false + - result.changed == true + - result.start_state.exists == false + - result.end_state.exists == true + - result.msg == "" + + # ############################################################################# + + - name: Run region_jcl module with initial state when member already exists + ibm.ibm_zos_cics.region_jcl: + state: initial + register: result + + - name: Debug + ansible.builtin.debug: + msg: "{{ result }}" + + - name: Assert {{ data_set_path }} already existed but now recreated + ansible.builtin.assert: + that: + - result.failed == false + - result.changed == true + - result.start_state.exists == true + - result.end_state.exists == true + - result.msg == "" + + # ############################################################################# + + - name: Run region_jcl module with warm state for matching data set content + ibm.ibm_zos_cics.region_jcl: + state: warm + register: result + + - name: Debug + ansible.builtin.debug: + msg: "{{ result }}" + + - name: Assert {{ data_set_path }} in warm state + ansible.builtin.assert: + that: + - result.failed == false + - result.changed == false + - result.start_state.exists == true + - result.end_state.exists == true + - result.msg == "" + + # ############################################################################# + + - name: Run region_jcl module with absent state 
+ ibm.ibm_zos_cics.region_jcl: + state: absent + register: result + + - name: Debug + ansible.builtin.debug: + msg: "{{ result }}" + + - name: Assert {{ data_set_path }} in absent state + ansible.builtin.assert: + that: + - result.failed == false + - result.changed == true + - result.start_state.exists == true + - result.end_state.exists == false + - result.msg == "" + + # ############################################################################# + + - name: Run region_jcl module with warm state for non-existing member + ibm.ibm_zos_cics.region_jcl: + state: warm + ignore_errors: true + register: result + + - name: Debug + ansible.builtin.debug: + msg: "{{ result }}" + + - name: Assert warm state failed for {{ data_set_path }} + ansible.builtin.assert: + that: + - result.failed == true + - result.changed == false + - result.start_state.exists == false + - result.end_state.exists == false + - result.msg == "Data set {{ data_set_path }} does not exist." + + # ############################################################################# + + - name: Create dummy member {{ data_set_path }} + ibm.ibm_zos_core.zos_data_set: + name: "{{ data_set_path }}" + state: present + type: member + register: result + retries: 3 + until: result is not failed + + - name: Run region_jcl module with warm state for non-matching data set content + ibm.ibm_zos_cics.region_jcl: + state: warm + ignore_errors: true + register: result + + - name: Debug + ansible.builtin.debug: + msg: "{{ result }}" + + - name: Assert warm state failed for {{ data_set_path }} + ansible.builtin.assert: + that: + - result.failed == true + - result.changed == false + - result.start_state.exists == true + - result.end_state.exists == true + - result.msg == "Data set {{ data_set_path }} does not contain the expected Region JCL." 
+ + # ############################################################################# + + - name: Run region_jcl module with absent state + ibm.ibm_zos_cics.region_jcl: + state: absent + register: result + + - name: Debug + ansible.builtin.debug: + msg: "{{ result }}" + + - name: Assert {{ data_set_path }} in absent state + ansible.builtin.assert: + that: + - result.failed == false + - result.changed == true + - result.start_state.exists == true + - result.end_state.exists == false + - result.msg == "" + + # ############################################################################# + + - name: Run region_jcl module with absent state for non-existing member + ibm.ibm_zos_cics.region_jcl: + state: absent + register: result + + - name: Debug + ansible.builtin.debug: + msg: "{{ result }}" + + - name: Assert {{ data_set_path }} in absent state + ansible.builtin.assert: + that: + - result.failed == false + - result.changed == false + - result.start_state.exists == false + - result.end_state.exists == false + - result.msg == "" + + # ############################################################################# + + - name: Delete base data set {{ base_data_set_path }} + ibm.ibm_zos_core.zos_data_set: + name: "{{ base_data_set_path }}" + state: absent + register: result + retries: 3 + until: result is not failed + + - name: Run region_jcl module with absent state for non-existing base data set + ibm.ibm_zos_cics.region_jcl: + state: absent + register: result + + - name: Debug + ansible.builtin.debug: + msg: "{{ result }}" + + - name: Assert {{ data_set_path }} in absent state + ansible.builtin.assert: + that: + - result.failed == false + - result.changed == false + - result.start_state.exists == false + - result.end_state.exists == false + - result.msg == "" + + # ############################################################################# + # ################################## Cleanup ################################## + # 
############################################################################# + + always: + - name: Delete base data set {{ base_data_set_path }} + ibm.ibm_zos_core.zos_data_set: + name: "{{ base_data_set_path }}" + state: absent + register: result + retries: 3 + until: result is not failed diff --git a/tests/integration/targets/cics_region_jcl/runme.sh b/tests/integration/targets/cics_region_jcl/runme.sh new file mode 100755 index 00000000..c9295f72 --- /dev/null +++ b/tests/integration/targets/cics_region_jcl/runme.sh @@ -0,0 +1,11 @@ +#!/usr/bin/env bash +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +set -eux + +VAR_PATH="$ANSIBLE_COLLECTIONS_PATH/ansible_collections/ibm/ibm_zos_cics/tests/integration/variables/provisioning.yml" +INV_PATH="$ANSIBLE_COLLECTIONS_PATH/ansible_collections/ibm/ibm_zos_cics/tests/integration/inventory_zos.yml" +ZOS_ENV="$ANSIBLE_COLLECTIONS_PATH/ansible_collections/ibm/ibm_zos_cics/tests/integration/variables/zos.yml" + +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" playbooks/region_jcl_member.yml diff --git a/tests/integration/targets/cics_start_stop/playbooks/missing_jobs.yml b/tests/integration/targets/cics_start_stop/playbooks/missing_jobs.yml new file mode 100644 index 00000000..ec17980c --- /dev/null +++ b/tests/integration/targets/cics_start_stop/playbooks/missing_jobs.yml @@ -0,0 +1,57 @@ +# (c) Copyright IBM Corp. 
2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) +--- +- name: Stop missing jobs + hosts: 'all' + gather_facts: false + environment: "{{ environment_vars }}" + + tasks: + - name: Issue shutdown command on missing job ID + ibm.ibm_zos_cics.stop_region: + job_id: NONJOB + register: stop_result + ignore_errors: true + + - name: Log output of stop + ansible.builtin.debug: + msg: "{{ stop_result }}" + + - name: Assert CICS stop failed + ansible.builtin.assert: + that: + - stop_result.failed == true + - stop_result.msg == "No jobs found with id NONJOB" + + - name: Issue shutdown command on missing job name + ibm.ibm_zos_cics.stop_region: + job_name: NONJOB + register: stop_result + ignore_errors: true + + - name: Log output of stop + ansible.builtin.debug: + msg: "{{ stop_result }}" + + - name: Assert CICS stop failed + ansible.builtin.assert: + that: + - stop_result.failed == true + - stop_result.msg == "Job with name NONJOB not found" + + - name: Issue shutdown command on missing job name and ID + ibm.ibm_zos_cics.stop_region: + job_name: NONJOB + job_id: NONJOB + register: stop_result + ignore_errors: true + + - name: Log output of stop + ansible.builtin.debug: + msg: "{{ stop_result }}" + + - name: Assert CICS stop failed + ansible.builtin.assert: + that: + - stop_result.failed == true + - stop_result.msg == "No jobs found with name NONJOB and ID NONJOB" diff --git a/tests/integration/targets/cics_start_stop/playbooks/provisioning_and_deprovisioning.yml b/tests/integration/targets/cics_start_stop/playbooks/provisioning_and_deprovisioning.yml new file mode 100644 index 00000000..65d8d619 --- /dev/null +++ b/tests/integration/targets/cics_start_stop/playbooks/provisioning_and_deprovisioning.yml @@ -0,0 +1,84 @@ +# (c) Copyright IBM Corp. 
2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) +--- +- name: Provision and Deprovision CICS Region + hosts: 'all' + gather_facts: false + environment: "{{ environment_vars }}" + + module_defaults: + group/ibm.ibm_zos_cics.region: + cics_data_sets: + template: "{{ cics_install_path }}.<< lib_name >>" + region_data_sets: + template: "{{ region_data_set_path }}.<< data_set_name >>" + space_primary: 5 + space_secondary: 2 + space_type: "M" + + tasks: + - name: Normal Start Stop + block: + - name: Initial normal + block: + - name: Create data sets + ansible.builtin.import_tasks: ../repeatable_tasks/data_sets.yml + vars: + data_set_state: initial + + - name: Start CICS + ansible.builtin.import_tasks: ../repeatable_tasks/start_cics.yml + + - name: Stop CICS Region + ansible.builtin.import_tasks: ../repeatable_tasks/stop_region.yml + vars: + mode: normal + + always: + - name: Delete job + ansible.builtin.command: + cmd: "jcan P {{ start_region_applid }} {{ start_result.stdout }}" + ignore_errors: true + + - name: Warm Immediate + block: + - name: Warm CICS data sets + ansible.builtin.import_tasks: ../repeatable_tasks/data_sets.yml + vars: + data_set_state: warm + + - name: Start CICS + ansible.builtin.import_tasks: ../repeatable_tasks/start_cics.yml + + - name: Stop CICS Region + ansible.builtin.import_tasks: ../repeatable_tasks/stop_region.yml + vars: + mode: immediate + + always: + - name: Delete job + ansible.builtin.command: + cmd: "jcan P {{ start_region_applid }} {{ start_result.stdout }}" + ignore_errors: true + + - name: Normal Cancel + block: + - name: Start CICS + ansible.builtin.import_tasks: ../repeatable_tasks/start_cics.yml + + - name: Stop CICS Region + ansible.builtin.import_tasks: ../repeatable_tasks/stop_region.yml + vars: + mode: cancel + + always: + - name: Delete job + ansible.builtin.command: + cmd: "jcan P {{ start_region_applid }} {{ start_result.stdout }}" + ignore_errors: true + + always: + - name: Delete data 
sets + ansible.builtin.import_tasks: ../repeatable_tasks/data_sets.yml + vars: + data_set_state: absent diff --git a/tests/integration/targets/cics_start_stop/playbooks/start_from_PDS.yml b/tests/integration/targets/cics_start_stop/playbooks/start_from_PDS.yml new file mode 100644 index 00000000..6a58eaee --- /dev/null +++ b/tests/integration/targets/cics_start_stop/playbooks/start_from_PDS.yml @@ -0,0 +1,103 @@ +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) +--- +- name: Provision and Deprovision CICS Region + hosts: 'all' + gather_facts: false + environment: "{{ environment_vars }}" + + module_defaults: + group/ibm.ibm_zos_cics.region: + cics_data_sets: + template: "{{ cics_install_path }}.<< lib_name >>" + region_data_sets: + template: "{{ region_data_set_path }}.<< data_set_name >>" + dfhstart: + dsn: "{{ region_data_set_path }}.LIB(DFHSTART)" + space_primary: 5 + space_secondary: 2 + space_type: "M" + + tasks: + - name: Normal Start Stop + block: + - name: Initial normal + block: + - name: Create PDS for region JCL data set member + ibm.ibm_zos_core.zos_data_set: + name: "{{ region_data_set_path }}.LIB" + state: present + type: PDS + register: result + retries: 3 + until: result is not failed + + - name: Create data sets + ansible.builtin.import_tasks: ../repeatable_tasks/data_sets.yml + vars: + data_set_state: initial + + - name: Start CICS + ansible.builtin.import_tasks: ../repeatable_tasks/start_cics.yml + + - name: Stop CICS Region + ansible.builtin.import_tasks: ../repeatable_tasks/stop_region.yml + vars: + mode: normal + + always: + - name: Delete job + ansible.builtin.command: + cmd: "jcan P {{ start_region_applid }} {{ start_result.stdout }}" + ignore_errors: true + + - name: Warm Immediate + block: + - name: Warm CICS data sets + ansible.builtin.import_tasks: ../repeatable_tasks/data_sets.yml + vars: + data_set_state: warm + + - name: Start CICS + ansible.builtin.import_tasks: 
../repeatable_tasks/start_cics.yml + + - name: Stop CICS Region + ansible.builtin.import_tasks: ../repeatable_tasks/stop_region.yml + vars: + mode: immediate + + always: + - name: Delete job + ansible.builtin.command: + cmd: "jcan P {{ start_region_applid }} {{ start_result.stdout }}" + ignore_errors: true + + - name: Normal Cancel + block: + - name: Start CICS + ansible.builtin.import_tasks: ../repeatable_tasks/start_cics.yml + + - name: Stop CICS Region + ansible.builtin.import_tasks: ../repeatable_tasks/stop_region.yml + vars: + mode: cancel + + always: + - name: Delete job + ansible.builtin.command: + cmd: "jcan P {{ start_region_applid }} {{ start_result.stdout }}" + ignore_errors: true + + always: + - name: Delete data sets + ansible.builtin.import_tasks: ../repeatable_tasks/data_sets.yml + vars: + data_set_state: absent + + - name: Delete PDS for region JCL data set member + ibm.ibm_zos_core.zos_data_set: + name: "{{ region_data_set_path }}.LIB" + state: absent + register: result + retries: 3 + until: result is not failed diff --git a/tests/integration/targets/cics_start_stop/playbooks/stop_args.yml b/tests/integration/targets/cics_start_stop/playbooks/stop_args.yml new file mode 100644 index 00000000..07ceea9f --- /dev/null +++ b/tests/integration/targets/cics_start_stop/playbooks/stop_args.yml @@ -0,0 +1,249 @@ +# (c) Copyright IBM Corp. 
2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) +--- +- name: Deprovision with job name + hosts: 'all' + gather_facts: false + environment: "{{ environment_vars }}" + + module_defaults: + group/ibm.ibm_zos_cics.region: + cics_data_sets: + template: "{{ cics_install_path }}.<< lib_name >>" + region_data_sets: + template: "{{ region_data_set_path }}.<< data_set_name >>" + space_primary: 2 + space_secondary: 1 + space_type: "M" + + tasks: + - name: Stop no args and job name + block: + - name: Create data sets + ansible.builtin.import_tasks: ../repeatable_tasks/data_sets.yml + vars: + data_set_state: initial + + - name: Start CICS (1) + ansible.builtin.import_tasks: ../repeatable_tasks/start_cics.yml + + - name: Stop with no args + ibm.ibm_zos_cics.stop_region: + register: stop_result + timeout: 300 + ignore_errors: true + + - name: Assert stop with no args failed + ansible.builtin.assert: + that: + - stop_result.failed is true + - stop_result.changed is false + - "{{ stop_result.executions | length }} == 0" + - stop_result.msg == "one of the following is required: job_id, job_name" + + - name: Stop CICS using job name + ansible.builtin.import_tasks: ../repeatable_tasks/stop_region.yml + vars: + mode: normal + jobid: false + jobname: true + + - name: Stop job name and job id + block: + - name: Start CICS (2) + ansible.builtin.import_tasks: ../repeatable_tasks/start_cics.yml + + - name: Stop CICS with both name and ID + ansible.builtin.import_tasks: ../repeatable_tasks/stop_region.yml + vars: + mode: normal + jobname: true + job_id: true + + - name: Stop job timeout + block: + - name: Start CICS (3) + ansible.builtin.import_tasks: ../repeatable_tasks/start_cics.yml + + - name: Issue shutdown command (with job_id and job_name) + ibm.ibm_zos_cics.stop_region: + job_name: "{{ start_region_applid }}" + job_id: "{{ start_result.stdout }}" + mode: normal + timeout: 120 + register: stop_result + + - name: Log output of stop + 
ansible.builtin.debug: + msg: "{{ stop_result }}" + + - name: Assert CICS stop did not fail + ansible.builtin.assert: + that: + - stop_result.failed == false + - stop_result.msg == "" + - stop_result.executions[-1].return.failed == False + - stop_result.executions[-1].return.output[0].content | length == 2 + - "'ON OUTPUT QUEUE' in '{{ stop_result.executions[-1].return.output[0].content | join(' ') }}'" + fail_msg: "CICS Region did not stop successfully" + - name: Assert execution RCs are 0 + ansible.builtin.assert: + that: + - item.rc == 0 + msg: Job failed with RC != 0 + loop: "{{ stop_result.executions }}" + + always: + - name: Delete job + ansible.builtin.command: + cmd: "jcan P {{ start_region_applid }} {{ start_result.stdout }}" + ignore_errors: true + + - name: Stop job no timeout + block: + - name: Start CICS (4) + ansible.builtin.import_tasks: ../repeatable_tasks/start_cics.yml + + - name: Issue shutdown command (with job_id and job_name) + ibm.ibm_zos_cics.stop_region: + job_name: "{{ start_region_applid }}" + job_id: "{{ start_result.stdout }}" + mode: normal + timeout: -1 + register: stop_result + timeout: 300 + + - name: Log output of stop + ansible.builtin.debug: + msg: "{{ stop_result }}" + + - name: Assert CICS stop did not fail + ansible.builtin.assert: + that: + - stop_result.failed != True + - stop_result.msg == "" + - stop_result.executions[-1].return.failed == False + - stop_result.executions[-1].return.output[0].content | length == 2 + - "'ON OUTPUT QUEUE' in '{{ stop_result.executions[-1].return.output[0].content | join(' ') }}'" + fail_msg: "CICS Region did not stop successfully" + - name: Assert execution RCs are 0 + ansible.builtin.assert: + that: + - item.rc == 0 + msg: Job failed with RC != 0 + loop: "{{ stop_result.executions }}" + + - name: Stop job mismatched name and ID + block: + - name: Start CICS (5) + ansible.builtin.import_tasks: ../repeatable_tasks/start_cics.yml + + - name: Issue shutdown command (wrong id) + 
ibm.ibm_zos_cics.stop_region: + job_name: "{{ start_region_applid }}" + job_id: "NONJOB" + register: stop_result + timeout: 300 + ignore_errors: true + + - name: Log output of stop + ansible.builtin.debug: + msg: "{{ stop_result }}" + + - name: Assert CICS stop did not fail + ansible.builtin.assert: + that: + - stop_result.failed == true + - stop_result.msg == "No jobs found with name {{ start_region_applid }} and ID NONJOB" + + - name: Issue shutdown command (wrong name) + ibm.ibm_zos_cics.stop_region: + job_name: "NONJOB" + job_id: "{{ start_result.stdout }}" + register: stop_result + timeout: 300 + ignore_errors: true + + - name: Log output of stop + ansible.builtin.debug: + msg: "{{ stop_result }}" + + - name: Assert CICS stop did not fail + ansible.builtin.assert: + that: + - stop_result.failed == true + - stop_result.msg == "No jobs found with name NONJOB and ID {{ start_result.stdout }}" + always: + - name: Delete job + ansible.builtin.command: + cmd: "jcan C {{ start_region_applid }} {{ start_result.stdout }}" + ignore_errors: true + + - name: Stop job already stopped + block: + - name: Issue shutdown command with name + ibm.ibm_zos_cics.stop_region: + job_name: "{{ start_region_applid }}" + register: stop_result + - name: Assert CICS stop did not fail + ansible.builtin.assert: + that: + - stop_result.failed == false + - stop_result.changed == false + - stop_result.msg == "" + - name: Issue shutdown command with ID + ibm.ibm_zos_cics.stop_region: + job_id: "{{ start_result.stdout }}" + register: stop_result + - name: Assert CICS stop did not fail + ansible.builtin.assert: + that: + - stop_result.failed == false + - stop_result.changed == false + - stop_result.msg == "" + - name: Issue shutdown command with ID and name + ibm.ibm_zos_cics.stop_region: + job_id: "{{ start_result.stdout }}" + job_name: "{{ start_region_applid }}" + register: stop_result + - name: Assert CICS stop did not fail + ansible.builtin.assert: + that: + - stop_result.failed == false + - 
stop_result.changed == false + - stop_result.msg == "" + + - name: Issue shutdown command with mismatched name and id (wrong name) + ibm.ibm_zos_cics.stop_region: + job_id: "{{ start_result.stdout }}" + job_name: "NONJOB" + register: stop_result + ignore_errors: true + - name: Assert CICS stop did not fail + ansible.builtin.assert: + that: + - stop_result.failed == true + - stop_result.changed == false + - stop_result.msg == "No jobs found with name NONJOB and ID {{ start_result.stdout }}" + - name: Issue shutdown command with mismatched name and id (wrong id) + ibm.ibm_zos_cics.stop_region: + job_id: "NONJOB" + job_name: "{{ start_region_applid }}" + register: stop_result + ignore_errors: true + - name: Assert CICS stop did not fail + ansible.builtin.assert: + that: + - stop_result.failed == true + - stop_result.changed == false + - stop_result.msg == "No jobs found with name {{ start_region_applid }} and ID NONJOB" + + always: + - name: Delete job + ansible.builtin.command: + cmd: "jcan P {{ start_region_applid }} {{ start_result.stdout }}" + ignore_errors: true + - name: Delete data sets + ansible.builtin.import_tasks: ../repeatable_tasks/data_sets.yml + vars: + data_set_state: absent diff --git a/tests/integration/targets/cics_start_stop/playbooks/validate_console_autoinstall_fail.yml b/tests/integration/targets/cics_start_stop/playbooks/validate_console_autoinstall_fail.yml new file mode 100644 index 00000000..b99104bb --- /dev/null +++ b/tests/integration/targets/cics_start_stop/playbooks/validate_console_autoinstall_fail.yml @@ -0,0 +1,61 @@ +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) +--- +- name: Validate console autoinstall failed + hosts: 'all' + gather_facts: false + environment: "{{ environment_vars }}" + + vars: + AUTOINSTALL_FAIL_MSG: "Shutdown command failed because the auto-install of the console was unsuccessful. See executions for full command output." 
+ + module_defaults: + group/ibm.ibm_zos_cics.region: + cics_data_sets: + template: "{{ cics_install_path }}.<< lib_name >>" + region_data_sets: + template: "{{ region_data_set_path }}.<< data_set_name >>" + space_primary: 1 + space_secondary: 1 + space_type: "M" + + tasks: + - name: Failed stop + block: + - name: Create data sets (not CSD) + ansible.builtin.import_tasks: ../repeatable_tasks/data_sets.yml + vars: + data_set_state: initial + ignore_csd_script: true + aicons_val: "AUTO" + + - name: Start CICS + ansible.builtin.import_tasks: ../repeatable_tasks/start_cics.yml + + - name: Delay + ansible.builtin.pause: + seconds: 5 + + - name: Stop CICS Region + ansible.builtin.import_tasks: ../repeatable_tasks/stop_region.yml + ignore_errors: true + vars: + mode: normal + expect_failure: true + + - name: Assert failure with message + ansible.builtin.assert: + that: + - stop_output.failed is true + - stop_output.msg == "{{ AUTOINSTALL_FAIL_MSG }}" + - stop_output.changed is false + - "'executions' in stop_output" + always: + - name: Cancel and Delete job + ansible.builtin.command: + cmd: "jcan P {{ start_region_applid }} {{ start_result.stdout }}" + ignore_errors: true + - name: Delete data sets + ansible.builtin.import_tasks: ../repeatable_tasks/data_sets.yml + vars: + data_set_state: absent diff --git a/tests/integration/targets/cics_start_stop/playbooks/validate_console_not_defined.yml b/tests/integration/targets/cics_start_stop/playbooks/validate_console_not_defined.yml new file mode 100644 index 00000000..a32cb0b0 --- /dev/null +++ b/tests/integration/targets/cics_start_stop/playbooks/validate_console_not_defined.yml @@ -0,0 +1,61 @@ +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) +--- +- name: Validate console not defined + hosts: 'all' + gather_facts: false + environment: "{{ environment_vars }}" + + vars: + UNDEFINED_CONSOLE_MSG: "Shutdown command failed because the console used was not defined. 
See executions for full command output." + + module_defaults: + group/ibm.ibm_zos_cics.region: + cics_data_sets: + template: "{{ cics_install_path }}.<< lib_name >>" + region_data_sets: + template: "{{ region_data_set_path }}.<< data_set_name >>" + space_primary: 1 + space_secondary: 1 + space_type: "M" + + tasks: + - name: Failed stop + block: + - name: Create data sets (not CSD) + ansible.builtin.import_tasks: ../repeatable_tasks/data_sets.yml + vars: + data_set_state: initial + ignore_csd_script: true + aicons_val: "NO" + + - name: Start CICS + ansible.builtin.import_tasks: ../repeatable_tasks/start_cics.yml + + - name: Delay + ansible.builtin.pause: + seconds: 5 + + - name: Stop CICS Region + ansible.builtin.import_tasks: ../repeatable_tasks/stop_region.yml + ignore_errors: true + vars: + mode: normal + expect_failure: true + + - name: Assert failure with message + ansible.builtin.assert: + that: + - stop_output.failed is true + - stop_output.msg == "{{ UNDEFINED_CONSOLE_MSG }}" + - stop_output.changed is false + - "'executions' in stop_output" + always: + - name: Cancel and Delete job + ansible.builtin.command: + cmd: "jcan P {{ start_region_applid }} {{ start_result.stdout }}" + ignore_errors: true + - name: Delete data sets + ansible.builtin.import_tasks: ../repeatable_tasks/data_sets.yml + vars: + data_set_state: absent diff --git a/tests/integration/targets/cics_start_stop/repeatable_tasks/data_sets.yml b/tests/integration/targets/cics_start_stop/repeatable_tasks/data_sets.yml new file mode 100644 index 00000000..9bf9783d --- /dev/null +++ b/tests/integration/targets/cics_start_stop/repeatable_tasks/data_sets.yml @@ -0,0 +1,111 @@ +- name: "Auxiliary temporary storage data set (DFHTEMP) {{ data_set_state }}" + ibm.ibm_zos_cics.aux_temp_storage: + state: "{{ data_set_state }}" + register: result + retries: 3 + until: result is not failed + +- name: "Auxiliary trace data set (DFHAUXT) {{ data_set_state }}" + ibm.ibm_zos_cics.aux_trace: + state: "{{ 
data_set_state }}" + register: result + retries: 3 + until: result is not failed + +- name: "Second auxiliary trace data set (DFHBUXT) {{ data_set_state }}" + ibm.ibm_zos_cics.aux_trace: + state: "{{ data_set_state }}" + destination: B + register: result + retries: 3 + until: result is not failed + +- name: "CICS Global Catalog {{ data_set_state }}" + ibm.ibm_zos_cics.global_catalog: + state: "{{ data_set_state }}" + register: result + retries: 3 + until: result is not failed + +- name: "CICS Local Catalog {{ data_set_state }}" + ibm.ibm_zos_cics.local_catalog: + state: "{{ data_set_state }}" + register: result + retries: 3 + until: result is not failed + +- name: "CSD data set (DFHCSD) {{ data_set_state }}" + ibm.ibm_zos_cics.csd: + state: "{{ data_set_state }}" + register: result + retries: 3 + until: result is not failed + +- name: Run DFHCSDUP script + when: + - ignore_csd_script is not defined or ignore_csd_script is false + - data_set_state == "initial" + ibm.ibm_zos_cics.csd: + state: "changed" + input_location: "INLINE" + input_content: "ADD GROUP(DFHTERMC) LIST(DFHLIST1)" + +- name: "Local request queue data set (DFHLRQ) {{ data_set_state }}" + ibm.ibm_zos_cics.local_request_queue: + state: "{{ data_set_state }}" + register: result + retries: 3 + until: result is not failed + +- name: "Transient data intrapartition data set (DFHINTRA) {{ data_set_state }}" + ibm.ibm_zos_cics.td_intrapartition: + state: "{{ data_set_state }}" + register: result + retries: 3 + until: result is not failed + +- name: "Transaction dump data set (DFHDMPA) {{ data_set_state }}" + ibm.ibm_zos_cics.transaction_dump: + state: "{{ data_set_state }}" + register: result + retries: 3 + until: result is not failed + +- name: "Second transaction dump data set (DFHDMPB) {{ data_set_state }}" + ibm.ibm_zos_cics.transaction_dump: + destination: B + state: "{{ data_set_state }}" + register: result + retries: 3 + until: result is not failed + +- name: Create start CICS JCL + 
ibm.ibm_zos_cics.region_jcl: + state: "{{ data_set_state }}" + applid: "{{ start_region_applid }}" + job_parameters: + region: 0M + cics_data_sets: + template: "{{ cics_install_path }}.<< lib_name >>" + sdfhlic: "{{ cics_install_path }}.LIC.SDFHLIC" + le_data_sets: + template: "{{ le_path }}.<< lib_name >>" + region_data_sets: + template: "{{ region_data_set_path }}.<< data_set_name >>" + cpsm_data_sets: + template: "{{ cpsm_data_set_path }}.<< lib_name >>" + sit_parameters: + start: AUTO + sit: 6$ + aicons: "{{ aicons_val | default('AUTO') }}" + cicssvc: 217 + edsalim: 500M + grplist: (DFHLIST*) + gmtext: 'Welcome to CICS Integration Tests' + srbsvc: 218 + tcpip: "NO" + usshome: "{{ usshome }}" + sysidnt: ZPY1 + register: result + retries: 3 + until: result is not failed diff --git a/tests/integration/targets/cics_start_stop/repeatable_tasks/start_cics.yml b/tests/integration/targets/cics_start_stop/repeatable_tasks/start_cics.yml new file mode 100644 index 00000000..13eed531 --- /dev/null +++ b/tests/integration/targets/cics_start_stop/repeatable_tasks/start_cics.yml @@ -0,0 +1,33 @@ +- name: Start CICS Region + ansible.builtin.command: + cmd: jsub "{{ region_data_set_path }}.DFHSTART" + register: start_result + changed_when: true + +- name: Log output of start + ansible.builtin.debug: + msg: "{{ start_result }}" + +- name: Wait for the region to start + ansible.builtin.pause: + seconds: 10 + +- name: Assert Start CICS Module did not fail + ansible.builtin.assert: + that: + - start_result.rc == 0 + - start_result.failed == False + - start_result.changed == True + fail_msg: "Start CICS module failed" + +- name: Check jobs running + ibm.ibm_zos_core.zos_job_query: + job_id: "{{ start_result.stdout }}" + register: running_result + +- name: Assert CICS started + ansible.builtin.assert: + that: + - running_result.jobs | selectattr("ret_code", 'equalto', None) | list | length > 0 + - running_result.failed != True + fail_msg: "CICS Region did not start successfully" 
diff --git a/tests/integration/targets/cics_start_stop/repeatable_tasks/stop_region.yml b/tests/integration/targets/cics_start_stop/repeatable_tasks/stop_region.yml new file mode 100644 index 00000000..2346310a --- /dev/null +++ b/tests/integration/targets/cics_start_stop/repeatable_tasks/stop_region.yml @@ -0,0 +1,94 @@ +- name: Create data set lock if it doesn't exist to indicate console is in use + ibm.ibm_zos_core.zos_data_set: + name: "{{ ansible_user }}.AN{{ build_number }}.STOPCICS.LOCK" + type: seq + state: present + replace: false + register: ds_status + until: ds_status.changed + retries: 180 + delay: 10 + +- name: Stop Region + block: + - name: Issue shutdown command (with job_id) + when: + - jobname is not defined or jobname is false + - jobid is not defined or jobid is true + ibm.ibm_zos_cics.stop_region: + job_id: "{{ start_result.stdout }}" + mode: "{{ mode }}" + register: stop_result + timeout: 300 + + - name: Set stop_output + ansible.builtin.set_fact: stop_output="{{ stop_result }}" + when: "'skip_reason' not in stop_result" + + - name: Issue shutdown command (with job_name) + when: + - jobname is true + - jobid is false + ibm.ibm_zos_cics.stop_region: + job_name: "{{ start_region_applid }}" + mode: "{{ mode }}" + register: stop_result + timeout: 300 + + - name: Set stop_output + ansible.builtin.set_fact: stop_output="{{ stop_result }}" + when: stop_result.changed + + - name: Issue shutdown command (with job_id and job_name) + when: + - jobname is true + - jobid is not defined or jobid is true + ibm.ibm_zos_cics.stop_region: + job_name: "{{ start_region_applid }}" + job_id: "{{ start_result.stdout }}" + mode: "{{ mode }}" + register: stop_result + timeout: 300 + + - name: Set stop_output + ansible.builtin.set_fact: stop_output="{{ stop_result }}" + when: stop_result.changed + + always: + - name: Log output of stop + ansible.builtin.debug: + msg: "{{ stop_output }}" + - name: Remove data set to indicate console is not in use + 
ibm.ibm_zos_core.zos_data_set: + name: "{{ ansible_user }}.AN{{ build_number }}.STOPCICS.LOCK" + state: absent + register: ds_status + until: ds_status.changed + retries: 3 + delay: 10 + +- name: Assert CICS stop did not fail + when: expect_failure is not defined or expect_failure is false + ansible.builtin.assert: + that: + - stop_output.failed != True + - stop_output.msg == "" + - stop_output.executions[-1].return.failed == False + - stop_output.executions[-1].return.output[0].content | length == 2 + - "'ON OUTPUT QUEUE' in '{{ stop_output.executions[-1].return.output[0].content | join(' ') }}'" + fail_msg: "CICS Region did not stop successfully" + +- name: Assert execution RCs are 0 + when: expect_failure is not defined or expect_failure is false + ansible.builtin.assert: + that: + - item.rc == 0 + msg: Job failed with RC != 0 + loop: "{{ stop_output.executions }}" + +- name: Execute a command to delete job + when: expect_failure is not defined or expect_failure is false + ansible.builtin.command: + cmd: "jcan P {{ start_region_applid }} {{ start_result.stdout }}" + register: delete_result + changed_when: delete_result.rc == 0 diff --git a/tests/integration/targets/cics_start_stop/runme.sh b/tests/integration/targets/cics_start_stop/runme.sh new file mode 100755 index 00000000..c269c0bf --- /dev/null +++ b/tests/integration/targets/cics_start_stop/runme.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env bash +# (c) Copyright IBM Corp. 
2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +set -eux + +VAR_PATH="$ANSIBLE_COLLECTIONS_PATH/ansible_collections/ibm/ibm_zos_cics/tests/integration/variables/provisioning.yml" +INV_PATH="$ANSIBLE_COLLECTIONS_PATH/ansible_collections/ibm/ibm_zos_cics/tests/integration/inventory_zos.yml" +ZOS_ENV="$ANSIBLE_COLLECTIONS_PATH/ansible_collections/ibm/ibm_zos_cics/tests/integration/variables/zos.yml" + +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" playbooks/provisioning_and_deprovisioning.yml +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" playbooks/validate_console_not_defined.yml +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" playbooks/validate_console_autoinstall_fail.yml +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" playbooks/stop_args.yml +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" playbooks/missing_jobs.yml +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" playbooks/start_from_PDS.yml diff --git a/tests/integration/targets/cics_utilities/library/cics_version.py b/tests/integration/targets/cics_utilities/library/cics_version.py new file mode 100644 index 00000000..332e7d41 --- /dev/null +++ b/tests/integration/targets/cics_utilities/library/cics_version.py @@ -0,0 +1,58 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 
2023,2024 + Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import absolute_import, division, print_function +__metaclass__ = type +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._cicsgetversion import (get_dataset_member_version_record) + +from typing import Dict + +HIGH_LEVEL_QUALIFIER = 'CICS_HLQ' + + +class CicsVersion(object): + + def __init__(self): + self._module = AnsibleModule( + argument_spec=self.init_argument_spec(), + ) # type AnsibleModule + self.result = dict(changed=False) # type: dict + + def init_argument_spec(self): # type: () -> Dict + return { + HIGH_LEVEL_QUALIFIER: { + 'required': True, + 'type': 'str' + }, + } + + def _fail(self, msg): # type: (str) -> None + self._module.fail_json(msg=msg, **self.result) + + def _exit(self): + self._module.exit_json(**self.result) + + def main(self): + self.result['params'] = self._module.params + + try: + cics_version = get_dataset_member_version_record(self._module.params.get(HIGH_LEVEL_QUALIFIER)) + self.result['cics_version'] = cics_version + self.result['rc'] = 0 + self._exit() + except Exception as e: + self.result['rc'] = 1 + self.result['exception'] = str(e) + self._fail("Error fetching version information from data set with {0}".format(self._module.params.get(HIGH_LEVEL_QUALIFIER))) + + +def main(): + CicsVersion().main() + + +if __name__ == '__main__': + main() diff --git a/tests/integration/targets/cics_utilities/playbooks/failure.yml b/tests/integration/targets/cics_utilities/playbooks/failure.yml new file mode 100644 index 00000000..13262b29 --- /dev/null +++ b/tests/integration/targets/cics_utilities/playbooks/failure.yml @@ -0,0 +1,333 @@ +# (c) Copyright IBM Corp. 
2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) +--- +- name: CICS Version Failure Case + hosts: "all" + gather_facts: false + environment: "{{ environment_vars }}" + vars: + ds_name: "{{ zos_user }}.{{ uniquename }}" + + tasks: + # ############################################################################# + # ############################## Initial cleanup ############################## + # ############################################################################# + + - name: Delete data set if it exists + ibm.ibm_zos_core.zos_data_set: + name: "{{ ds_name }}" + state: absent + register: delete_result + + - name: Debug + ansible.builtin.debug: + msg: "{{ delete_result }}" + + - name: Assert Delete Passed + ansible.builtin.assert: + that: + - delete_result.failed == false + + ############################################################################ + # Fail to get CICS version from non-existent data set + ############################################################################ + + - name: Wrap test in block so cleanup always runs + block: + - name: Fail to get CICS version information from non-existent data set + cics_version: + CICS_HLQ: "{{ ds_name }}" + register: result + ignore_errors: true + + - name: Assert non-existent + ansible.builtin.assert: + that: + - result.failed is true + - result is not changed + - result.rc == 1 + - "'exception' in result" + + ############################################################################ + # Create a empty data set + ############################################################################ + - name: Create PDS data set + ibm.ibm_zos_core.zos_data_set: + name: "{{ ds_name }}.SDFHSAMP" + type: PDS + retries: 10 + register: created_result + until: created_result.failed is false + delay: 5 + + - name: Debug + ansible.builtin.debug: + msg: "{{ created_result }}" + + - name: Wait until created + ibm.ibm_zos_core.zos_find: + patterns: + - "{{ ds_name }}.SDFHSAMP" + 
register: datasets + until: + - datasets is defined + - datasets.data_sets is defined + - datasets.data_sets != [] + retries: 10 + delay: 5 + + - name: Debug + ansible.builtin.debug: + msg: "{{ datasets }}" + + - name: Create PDS Member + ibm.ibm_zos_core.zos_data_set: + name: "{{ ds_name }}.SDFHSAMP(DFH0SINX)" + type: MEMBER + retries: 10 + register: created_result + until: created_result.failed is false + delay: 5 + + - name: Debug + ansible.builtin.debug: + msg: "{{ created_result }}" + + - name: Wait until created + ibm.ibm_zos_core.zos_find: + patterns: + - "DFH0SINX" + pds_patterns: + - "{{ ds_name }}.SDFHSAMP" + register: datasets + until: + - datasets is defined + - datasets.data_sets is defined + - datasets.data_sets != [] + retries: 10 + delay: 5 + + - name: Debug + ansible.builtin.debug: + msg: "{{ datasets }}" + + ############################################################################ + # Try to read empty data set + ############################################################################ + + - name: Fail to get CICS version information from empty data set + cics_version: + CICS_HLQ: "{{ ds_name }}" + register: result + ignore_errors: true + + - name: Assert failed on empty Member + ansible.builtin.assert: + that: + - result.failed is true + - result is not changed + - result.rc != 0 + - "'exception' in result" + + ############################################################################ + # Create some data and read a non numeric STATUS value + ############################################################################ + + - name: Write data to the data set to test for STATUS field + ansible.builtin.shell: + args: + executable: /rocket/bin/bash + cmd: decho 'STATUS = TEST' "{{ ds_name }}.SDFHSAMP(DFH0SINX)" + + - name: Wait until written + ibm.ibm_zos_core.zos_find: + patterns: + - "DFH0SINX" + pds_patterns: + - "{{ ds_name }}.SDFHSAMP" + contains: "STATUS = TEST" + register: datasets + until: + - datasets is defined + - datasets.data_sets 
is defined + - datasets.data_sets != [] + retries: 10 + delay: 5 + + - name: Debug + ansible.builtin.debug: + msg: "{{ datasets }}" + + - name: Get CICS version from new data set + cics_version: + CICS_HLQ: "{{ ds_name }}" + register: result + + - name: Assert string content fails + ansible.builtin.assert: + that: + - result is not changed + - result.cics_version == 'TEST' + - result.rc == 0 + - "'exception' not in result" + + ############################################################################ + # Create and read from data set where STATUS is EOF + ############################################################################ + + - name: Write data to the data set to test for no data after STATUS + ansible.builtin.shell: + args: + executable: /rocket/bin/bash + cmd: decho 'STATUS =' "{{ ds_name }}.SDFHSAMP(DFH0SINX)" + register: res + + - name: Debug + ansible.builtin.debug: + msg: "{{ res }}" + + - name: Wait until written + ibm.ibm_zos_core.zos_find: + patterns: + - "DFH0SINX" + pds_patterns: + - "{{ ds_name }}.SDFHSAMP" + contains: "STATUS =" + register: datasets + until: + - datasets is defined + - datasets.data_sets is defined + - datasets.data_sets != [] + retries: 10 + delay: 5 + + - name: Debug + ansible.builtin.debug: + msg: "{{ datasets }}" + + - name: Attempt to read data from STATUS which is EOF + cics_version: + CICS_HLQ: "{{ ds_name }}" + register: result + ignore_errors: true + + - name: Assert status EOF fails + ansible.builtin.assert: + that: + - result.failed is true + - result is not changed + - result.rc != 0 + - "'exception' in result" + + # ############################################################################# + # ################################# Teardown ################################## + # ############################################################################# + + always: + - name: Delete data set + ibm.ibm_zos_core.zos_data_set: + state: absent + name: "{{ ds_name }}.SDFHSAMP" + register: delete_result + + - name: 
Debug + ansible.builtin.debug: + msg: "{{ delete_result }}" + + + - name: Assert Delete Passed + ansible.builtin.assert: + that: + - delete_result.failed == false + + + ############################################################################ + # Create and read from a SEQ data set + ############################################################################ + + - name: Wrap test in block so cleanup always runs + block: + - name: Create SEQ data set + ibm.ibm_zos_core.zos_data_set: + name: "{{ ds_name }}.SEQ" + type: SEQ + retries: 10 + register: created_result + until: created_result.failed is false + delay: 5 + + - name: Debug + ansible.builtin.debug: + msg: "{{ created_result }}" + + - name: Wait until created + ibm.ibm_zos_core.zos_find: + patterns: + - "{{ ds_name }}.SEQ" + register: datasets + until: + - datasets is defined + - datasets.data_sets is defined + - datasets.data_sets != [] + retries: 10 + delay: 5 + + - name: Debug + ansible.builtin.debug: + msg: "{{ datasets }}" + + - name: Write data to SEQ data set + ansible.builtin.shell: + args: + executable: /rocket/bin/bash + cmd: decho 'STATUS = 1.0.0' "{{ ds_name }}.SEQ" + + - name: Wait until created + ibm.ibm_zos_core.zos_find: + patterns: + - "{{ ds_name }}.SEQ" + contains: "STATUS = 1.0.0" + register: datasets + until: + - datasets is defined + - datasets.data_sets is defined + - datasets.data_sets != [] + retries: 10 + delay: 5 + + - name: Debug + ansible.builtin.debug: + msg: "{{ datasets }}" + + - name: Read version from SEQ data set + cics_version: + CICS_HLQ: "{{ ds_name }}" + register: result + ignore_errors: true + + - name: Assert Seq data set fails + ansible.builtin.assert: + that: + - result.failed is true + - result is not changed + - "'exception' in result" + + # ############################################################################# + # ################################# Teardown ################################## + # 
############################################################################# + + always: + - name: Delete data set + ibm.ibm_zos_core.zos_data_set: + state: absent + name: "{{ ds_name }}.SEQ" + register: delete_result + + - name: Debug + ansible.builtin.debug: + msg: "{{ delete_result }}" + + - name: Assert Delete Passed + ansible.builtin.assert: + that: + - delete_result.failed == false diff --git a/tests/integration/targets/cics_utilities/playbooks/success.yml b/tests/integration/targets/cics_utilities/playbooks/success.yml new file mode 100644 index 00000000..50570bb9 --- /dev/null +++ b/tests/integration/targets/cics_utilities/playbooks/success.yml @@ -0,0 +1,72 @@ +# (c) Copyright IBM Corp. 2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) +--- +- name: CICS Version Success Case + hosts: "all" + gather_facts: false + environment: "{{ environment_vars }}" + + tasks: + ############################################################################ + # Get CICS version for dev version + ############################################################################ + - name: Retrieve CICS version information (Dev) + cics_version: + CICS_HLQ: 'ANTZ.CICS.TS.DEV.INTEGRAT' + register: result + + - name: Assert dev + ansible.builtin.assert: + that: + - result is not changed + - result.cics_version == '7.5.0' + - result.rc == 0 + - "'exception' not in result" + + ############################################################################ + # Get CICS version for 6.1 + ############################################################################ + - name: Retrieve CICS version information (6.1) + cics_version: + CICS_HLQ: 'CTS610.CICS740' + register: result + + - name: Assert 6.1 + ansible.builtin.assert: + that: + - result is not changed + - result.cics_version == '7.4.0' + - result.rc == 0 + - "'exception' not in result" + + ############################################################################ + # Get CICS version for 5.6 + 
############################################################################ + - name: Retrieve CICS version information (5.6) + cics_version: + CICS_HLQ: 'CTS560.CICS730' + register: result + + - name: Assert 5.6 + ansible.builtin.assert: + that: + - result is not changed + - result.cics_version == '7.3.0' + - result.rc == 0 + - "'exception' not in result" + + ############################################################################ + # Get CICS version for 5.4 + ############################################################################ + - name: Retrieve CICS version information (5.4) + cics_version: + CICS_HLQ: 'CTS540.CICS710' + register: result + + - name: Assert 5.4 + ansible.builtin.assert: + that: + - result is not changed + - result.cics_version == '7.1.0' + - result.rc == 0 + - "'exception' not in result" diff --git a/tests/integration/targets/cics_utilities/runme.sh b/tests/integration/targets/cics_utilities/runme.sh new file mode 100755 index 00000000..3b90dd77 --- /dev/null +++ b/tests/integration/targets/cics_utilities/runme.sh @@ -0,0 +1,13 @@ +#!/usr/bin/env bash +# (c) Copyright IBM Corp. 
2023 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) +set -eux # This is important to ensure that return codes from failing tests are propagated + +export ANSIBLE_LIBRARY=./library + +VAR_PATH="$ANSIBLE_COLLECTIONS_PATH/ansible_collections/ibm/ibm_zos_cics/tests/integration/variables/utilities.yml" +INV_PATH="$ANSIBLE_COLLECTIONS_PATH/ansible_collections/ibm/ibm_zos_cics/tests/integration/inventory_zos.yml" +ZOS_ENV="$ANSIBLE_COLLECTIONS_PATH/ansible_collections/ibm/ibm_zos_cics/tests/integration/variables/zos.yml" + +ansible-playbook -i "$INV_PATH" -e "@$ZOS_ENV" playbooks/success.yml +ansible-playbook -i "$INV_PATH" -e "@$VAR_PATH" -e "@$ZOS_ENV" playbooks/failure.yml diff --git a/tests/integration/template.inventory_zos.yml b/tests/integration/template.inventory_zos.yml new file mode 100644 index 00000000..ad3ec06d --- /dev/null +++ b/tests/integration/template.inventory_zos.yml @@ -0,0 +1,7 @@ +source_system: + hosts: + zos_host: + ansible_host: __ANSIBLE_TEST_HOST__ + ansible_user: __ANSIBLE_TEST_USER__ + ansible_python_interpreter: __ANSIBLE_TEST_PYTHON__/bin/python3 + ansible_ssh_common_args: "-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null" diff --git a/tests/integration/variables/template.provisioning.yml b/tests/integration/variables/template.provisioning.yml new file mode 100644 index 00000000..1a22c8d9 --- /dev/null +++ b/tests/integration/variables/template.provisioning.yml @@ -0,0 +1,11 @@ +cics_install_path: +cpsm_data_set_path: +region_data_set_common: "{{ ansible_user }}.CICS.TESTS" +region_data_set_unique: +region_data_set_path: "{{ region_data_set_common }}.{{ region_data_set_unique }}" +uss_path: "/u/{{ ansible_user | lower }}/{{ region_data_set_unique }}" +cics_applid: +le_path: +start_region_applid: +usshome: +build_number: 1230 diff --git a/tests/integration/variables/template.utilities.yml b/tests/integration/variables/template.utilities.yml new file mode 100644 index 00000000..e784c4e1 --- 
/dev/null +++ b/tests/integration/variables/template.utilities.yml @@ -0,0 +1,2 @@ +zos_user: "{{ ansible_user }}" +uniquename: diff --git a/tests/integration/variables/template.zos.yml b/tests/integration/variables/template.zos.yml new file mode 100644 index 00000000..562c6b47 --- /dev/null +++ b/tests/integration/variables/template.zos.yml @@ -0,0 +1,14 @@ +pyz: "__PYTHON_PATH__" +zoau: "__ZOAU_PATH__" + +environment_vars: + _BPXK_AUTOCVT: "ON" + ZOAU_HOME: "{{ zoau }}" + PYTHONPATH: "{{ zoau }}/lib" + LIBPATH: "{{ zoau }}/lib:{{ pyz }}/lib:/lib:/usr/lib:." + PATH: "{{ zoau }}/bin:{{ pyz }}/bin:/bin:/var/bin" + _CEE_RUNOPTS: "FILETAG(AUTOCVT,AUTOTAG) POSIX(ON)" + _TAG_REDIR_ERR: "txt" + _TAG_REDIR_IN: "txt" + _TAG_REDIR_OUT: "txt" + LANG: "C" diff --git a/tests/sanity/ignore-2.14.txt b/tests/sanity/ignore-2.14.txt index ae405ac8..6550864b 100644 --- a/tests/sanity/ignore-2.14.txt +++ b/tests/sanity/ignore-2.14.txt @@ -3,3 +3,13 @@ plugins/modules/cmci_action.py validate-modules:missing-gplv3-license # Licence plugins/modules/cmci_create.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 plugins/modules/cmci_delete.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 plugins/modules/cmci_update.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 +plugins/modules/global_catalog.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 +plugins/modules/local_catalog.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 +plugins/modules/local_request_queue.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 +plugins/modules/td_intrapartition.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 +plugins/modules/aux_temp_storage.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 +plugins/modules/aux_trace.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 +plugins/modules/csd.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 
+plugins/modules/transaction_dump.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 +plugins/modules/region_jcl.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 +plugins/modules/stop_region.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 diff --git a/tests/sanity/ignore-2.15.txt b/tests/sanity/ignore-2.15.txt index adf08117..6550864b 100644 --- a/tests/sanity/ignore-2.15.txt +++ b/tests/sanity/ignore-2.15.txt @@ -2,4 +2,14 @@ plugins/modules/cmci_get.py validate-modules:missing-gplv3-license # Licence is plugins/modules/cmci_action.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 plugins/modules/cmci_create.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 plugins/modules/cmci_delete.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 -plugins/modules/cmci_update.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 \ No newline at end of file +plugins/modules/cmci_update.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 +plugins/modules/global_catalog.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 +plugins/modules/local_catalog.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 +plugins/modules/local_request_queue.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 +plugins/modules/td_intrapartition.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 +plugins/modules/aux_temp_storage.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 +plugins/modules/aux_trace.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 +plugins/modules/csd.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 +plugins/modules/transaction_dump.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 +plugins/modules/region_jcl.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 +plugins/modules/stop_region.py 
validate-modules:missing-gplv3-license # Licence is Apache-2.0 diff --git a/tests/sanity/ignore-2.16.txt b/tests/sanity/ignore-2.16.txt index adf08117..6550864b 100644 --- a/tests/sanity/ignore-2.16.txt +++ b/tests/sanity/ignore-2.16.txt @@ -2,4 +2,14 @@ plugins/modules/cmci_get.py validate-modules:missing-gplv3-license # Licence is plugins/modules/cmci_action.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 plugins/modules/cmci_create.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 plugins/modules/cmci_delete.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 -plugins/modules/cmci_update.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 \ No newline at end of file +plugins/modules/cmci_update.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 +plugins/modules/global_catalog.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 +plugins/modules/local_catalog.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 +plugins/modules/local_request_queue.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 +plugins/modules/td_intrapartition.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 +plugins/modules/aux_temp_storage.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 +plugins/modules/aux_trace.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 +plugins/modules/csd.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 +plugins/modules/transaction_dump.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 +plugins/modules/region_jcl.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 +plugins/modules/stop_region.py validate-modules:missing-gplv3-license # Licence is Apache-2.0 diff --git a/tests/unit/action/test_data_set.py b/tests/unit/action/test_data_set.py new file mode 100644 index 00000000..1614f723 --- /dev/null +++ b/tests/unit/action/test_data_set.py @@ -0,0 
+1,220 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +from ansible.parsing.dataloader import DataLoader +from ansible.template import Templar +from ansible_collections.ibm.ibm_zos_cics.plugins.plugin_utils._module_action_plugin import ( + _process_module_args +) + + +def get_templar(module_args): + loader = DataLoader() + templar = Templar(loader=loader, variables=module_args) + return templar + + +def test_data_set_with_template(): + args_with_template = { + "region_data_sets": {"template": "data.set.template.<< data_set_name >>"}, + "space_primary": 2, + "space_secondary": 1, + "space_type": "M", + "state": "initial" + } + templar = get_templar(args_with_template) + task_vars = args_with_template + + _process_module_args(args_with_template, templar, "dfhlrq", task_vars, False) + + assert args_with_template == { + "region_data_sets": { + "dfhlrq": {"dsn": "data.set.template.DFHLRQ"}, + "template": "data.set.template.<< data_set_name >>", + }, + "space_primary": 2, + "space_secondary": 1, + "space_type": "M", + "state": "initial" + } + + +def test_data_set_with_override(): + args_with_override = { + "region_data_sets": {"dfhlrq": {"dsn": "data.set.path"}}, + "space_primary": 1, + "space_type": "M", + "state": "initial" + } + templar = get_templar(args_with_override) + task_vars = args_with_override + + _process_module_args(args_with_override, templar, "dfhlrq", task_vars, False) + + assert args_with_override == { + "region_data_sets": { + "dfhlrq": {"dsn": "data.set.path"} + }, + "space_primary": 1, + "space_type": "M", + "state": "initial" + } + + +def test_data_set_with_override_but_no_dsn_key(): + args_with_override = { + "region_data_sets": {"dfhlrq": {"garbage": "more.garbage"}}, + "space_primary": 1, + "space_type": "M", + "state": "initial" + } + templar = 
get_templar(args_with_override) + task_vars = args_with_override + + try: + _process_module_args(args_with_override, templar, "dfhlrq", task_vars, False) + except KeyError as e: + assert e.args[0] == "No template or data set override found for dfhlrq" + else: + assert False + + +def test_data_set_with_override_but_no_dsn_value(): + args_with_override = { + "region_data_sets": {"dfhlrq": {"dsn": None}}, + "space_primary": 1, + "space_type": "M", + "state": "initial" + } + templar = get_templar(args_with_override) + task_vars = args_with_override + + try: + _process_module_args(args_with_override, templar, "dfhlrq", task_vars, False) + except KeyError as e: + assert e.args[0] == "No template or data set override found for dfhlrq" + else: + assert False + + +def test_data_set_without_override_or_template(): + args_with_garbage = { + "region_data_sets": {"garbage": "more.garbage"}, + "space_primary": 1, + "space_type": "M", + "state": "initial" + } + templar = get_templar(args_with_garbage) + task_vars = args_with_garbage + + try: + _process_module_args(args_with_garbage, templar, "dfhlrq", task_vars, False) + except KeyError as e: + assert e.args[0] == "No template or data set override found for dfhlrq" + else: + assert False + + +def test_data_set_with_unnecessary_cics_data_sets_arg(): + args_with_template = { + "region_data_sets": {"template": "data.set.template.<< data_set_name >>"}, + "cics_data_sets": {"template": "data.set.template.<< lib_name >>"}, + "space_primary": 1, + "space_type": "M", + "state": "initial" + } + templar = get_templar(args_with_template) + task_vars = args_with_template + + _process_module_args(args_with_template, templar, "dfhlrq", task_vars, False) + + assert args_with_template == { + "region_data_sets": { + "dfhlrq": {"dsn": "data.set.template.DFHLRQ"}, + "template": "data.set.template.<< data_set_name >>", + }, + "space_primary": 1, + "space_type": "M", + "state": "initial" + } + + +def test_data_set_with_le_data_sets_arg(): + 
args_with_template = { + "region_data_sets": {"template": "data.set.template.<< data_set_name >>"}, + "le_data_sets": {"template": "le.data.set.template.<< lib_name >>"}, + "space_primary": 1, + "space_type": "M", + "state": "initial" + } + templar = get_templar(args_with_template) + task_vars = args_with_template + + _process_module_args(args_with_template, templar, "dfhlrq", task_vars, False) + + assert args_with_template == { + "region_data_sets": { + "dfhlrq": {"dsn": "data.set.template.DFHLRQ"}, + "template": "data.set.template.<< data_set_name >>", + }, + "space_primary": 1, + "space_type": "M", + "state": "initial" + } + + +def test_data_set_with_cpsm_data_sets_arg(): + args_with_template = { + "region_data_sets": {"template": "data.set.template.<< data_set_name >>"}, + "cpsm_data_sets": {"template": "cpsm.data.set.template.<< lib_name >>"}, + "space_primary": 1, + "space_type": "M", + "state": "initial" + } + templar = get_templar(args_with_template) + task_vars = args_with_template + + _process_module_args(args_with_template, templar, "dfhlrq", task_vars, False) + + assert args_with_template == { + "region_data_sets": { + "dfhlrq": {"dsn": "data.set.template.DFHLRQ"}, + "template": "data.set.template.<< data_set_name >>", + }, + "space_primary": 1, + "space_type": "M", + "state": "initial" + } + + +def test_data_set_with_required_cics_data_sets_templated(): + args_with_template = { + "region_data_sets": {"template": "data.set.template.<< data_set_name >>"}, + "cics_data_sets": {"template": "data.set.template.<< lib_name >>"}, + "space_primary": 1, + "space_type": "M", + "state": "initial" + } + templar = get_templar(args_with_template) + task_vars = args_with_template + + _process_module_args(args_with_template, templar, "dfhgcd", task_vars, True) + + assert args_with_template == { + "region_data_sets": { + "dfhgcd": {"dsn": "data.set.template.DFHGCD"}, + "template": "data.set.template.<< data_set_name >>", + }, + "cics_data_sets": { + "template": 
"data.set.template.<< lib_name >>", + "sdfhload": "data.set.template.SDFHLOAD" + }, + "space_primary": 1, + "space_type": "M", + "state": "initial" + } diff --git a/tests/unit/action/test_region_jcl.py b/tests/unit/action/test_region_jcl.py new file mode 100644 index 00000000..e68d53b3 --- /dev/null +++ b/tests/unit/action/test_region_jcl.py @@ -0,0 +1,411 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from ansible_collections.ibm.ibm_zos_cics.plugins.modules.region_jcl import DSN +from ansible_collections.ibm.ibm_zos_cics.plugins.action.region_jcl import _process_module_args +from ansible.parsing.dataloader import DataLoader +from ansible.template import Templar + + +def get_templar(module_args): + loader = DataLoader() + templar = Templar(loader=loader, variables=module_args) + return templar + + +def test_process_args_with_only_template(): + module_args = { + "region_data_sets": {"template": "TEST.CICSPY1.RDEV.<< data_set_name >>"}, + "cics_data_sets": {"template": "TEST.CICS.<< lib_name >>"}, + "le_data_sets": {"template": "TEST.LE.<< lib_name >>"} + } + templar = get_templar(module_args) + task_vars = module_args + _process_module_args(module_args, templar, task_vars) + assert module_args == { + "region_data_sets": { + 'dfhauxt': {DSN: "TEST.CICSPY1.RDEV.DFHAUXT"}, + 'dfhbuxt': {DSN: "TEST.CICSPY1.RDEV.DFHBUXT"}, + 'dfhcsd': {DSN: "TEST.CICSPY1.RDEV.DFHCSD"}, + 'dfhgcd': {DSN: "TEST.CICSPY1.RDEV.DFHGCD"}, + 'dfhintra': {DSN: "TEST.CICSPY1.RDEV.DFHINTRA"}, + 'dfhlcd': {DSN: "TEST.CICSPY1.RDEV.DFHLCD"}, + 'dfhlrq': {DSN: "TEST.CICSPY1.RDEV.DFHLRQ"}, + 'dfhtemp': {DSN: "TEST.CICSPY1.RDEV.DFHTEMP"}, + 'dfhdmpa': {DSN: "TEST.CICSPY1.RDEV.DFHDMPA"}, + 'dfhdmpb': {DSN: "TEST.CICSPY1.RDEV.DFHDMPB"}, + 'dfhstart': {DSN: "TEST.CICSPY1.RDEV.DFHSTART"}, + }, + "cics_data_sets": { + 
"sdfhload": "TEST.CICS.SDFHLOAD", + "sdfhauth": "TEST.CICS.SDFHAUTH", + "sdfhlic": "TEST.CICS.SDFHLIC", + "template": "TEST.CICS.<< lib_name >>" + }, + "le_data_sets": { + "sceecics": "TEST.LE.SCEECICS", + "sceerun": "TEST.LE.SCEERUN", + "sceerun2": "TEST.LE.SCEERUN2", + "template": "TEST.LE.<< lib_name >>" + }, + "steplib": { + "top_data_sets": [] + }, + "dfhrpl": { + "top_data_sets": [] + } + } + + +def test_process_args_with_some_overrides(): + module_args = { + "region_data_sets": { + "template": "TEST.CICSPY1.RDEV.<< data_set_name >>", + "dfhauxt": {DSN: "TEST.CICSPY1.RDEV.TRACE1"} + }, + "cics_data_sets": { + "template": "TEST.CICS.<< lib_name >>", + "sdfhload": "TEST.CICS.LOAD" + }, + "le_data_sets": { + "template": "TEST.LE.<< lib_name >>", + "sceerun": "TEST.LE.RUN" + } + } + templar = get_templar(module_args) + task_vars = module_args + _process_module_args(module_args, templar, task_vars) + assert module_args == { + "region_data_sets": { + 'dfhauxt': {DSN: "TEST.CICSPY1.RDEV.TRACE1"}, + 'dfhbuxt': {DSN: "TEST.CICSPY1.RDEV.DFHBUXT"}, + 'dfhcsd': {DSN: "TEST.CICSPY1.RDEV.DFHCSD"}, + 'dfhgcd': {DSN: "TEST.CICSPY1.RDEV.DFHGCD"}, + 'dfhintra': {DSN: "TEST.CICSPY1.RDEV.DFHINTRA"}, + 'dfhlcd': {DSN: "TEST.CICSPY1.RDEV.DFHLCD"}, + 'dfhlrq': {DSN: "TEST.CICSPY1.RDEV.DFHLRQ"}, + 'dfhtemp': {DSN: "TEST.CICSPY1.RDEV.DFHTEMP"}, + 'dfhdmpa': {DSN: "TEST.CICSPY1.RDEV.DFHDMPA"}, + 'dfhdmpb': {DSN: "TEST.CICSPY1.RDEV.DFHDMPB"}, + 'dfhstart': {DSN: "TEST.CICSPY1.RDEV.DFHSTART"}, + }, + "cics_data_sets": { + "sdfhload": "TEST.CICS.LOAD", + "sdfhauth": "TEST.CICS.SDFHAUTH", + "sdfhlic": "TEST.CICS.SDFHLIC", + "template": "TEST.CICS.<< lib_name >>" + }, + "le_data_sets": { + "sceecics": "TEST.LE.SCEECICS", + "sceerun": "TEST.LE.RUN", + "sceerun2": "TEST.LE.SCEERUN2", + "template": "TEST.LE.<< lib_name >>" + }, + "steplib": { + "top_data_sets": [] + }, + "dfhrpl": { + "top_data_sets": [] + } + } + + +def test_process_args_with_only_overrides(): + module_args = { + 
"region_data_sets": { + 'dfhauxt': {DSN: "TEST.CICSPY1.RDEV.TRACE1"}, + 'dfhbuxt': {DSN: "TEST.CICSPY1.RDEV.TRACE2"}, + 'dfhcsd': {DSN: "TEST.CICSPY1.RDEV.CSD"}, + 'dfhgcd': {DSN: "TEST.CICSPY1.RDEV.GCD"}, + 'dfhintra': {DSN: "TEST.CICSPY1.RDEV.INTRA"}, + 'dfhlcd': {DSN: "TEST.CICSPY1.RDEV.LCD"}, + 'dfhlrq': {DSN: "TEST.CICSPY1.RDEV.LRQ"}, + 'dfhtemp': {DSN: "TEST.CICSPY1.RDEV.TEMP"}, + 'dfhdmpa': {DSN: "TEST.CICSPY1.RDEV.DUMPA"}, + 'dfhdmpb': {DSN: "TEST.CICSPY1.RDEV.DUMPB"}, + 'dfhstart': {DSN: "TEST.CICSPY1.RDEV.START"}, + }, + "cics_data_sets": { + "sdfhload": "TEST.CICS.LOAD", + "sdfhauth": "TEST.CICS.AUTH", + "sdfhlic": "TEST.CICS.LIC", + }, + "le_data_sets": { + "sceecics": "TEST.LE.CICS", + "sceerun": "TEST.LE.RUN", + "sceerun2": "TEST.LE.RUN2", + }, + } + templar = get_templar(module_args) + task_vars = module_args + _process_module_args(module_args, templar, task_vars) + assert module_args == { + "region_data_sets": { + 'dfhauxt': {DSN: "TEST.CICSPY1.RDEV.TRACE1"}, + 'dfhbuxt': {DSN: "TEST.CICSPY1.RDEV.TRACE2"}, + 'dfhcsd': {DSN: "TEST.CICSPY1.RDEV.CSD"}, + 'dfhgcd': {DSN: "TEST.CICSPY1.RDEV.GCD"}, + 'dfhintra': {DSN: "TEST.CICSPY1.RDEV.INTRA"}, + 'dfhlcd': {DSN: "TEST.CICSPY1.RDEV.LCD"}, + 'dfhlrq': {DSN: "TEST.CICSPY1.RDEV.LRQ"}, + 'dfhtemp': {DSN: "TEST.CICSPY1.RDEV.TEMP"}, + 'dfhdmpa': {DSN: "TEST.CICSPY1.RDEV.DUMPA"}, + 'dfhdmpb': {DSN: "TEST.CICSPY1.RDEV.DUMPB"}, + 'dfhstart': {DSN: "TEST.CICSPY1.RDEV.START"}, + }, + "cics_data_sets": { + "sdfhload": "TEST.CICS.LOAD", + "sdfhauth": "TEST.CICS.AUTH", + "sdfhlic": "TEST.CICS.LIC", + }, + "le_data_sets": { + "sceecics": "TEST.LE.CICS", + "sceerun": "TEST.LE.RUN", + "sceerun2": "TEST.LE.RUN2", + }, + "steplib": { + "top_data_sets": [] + }, + "dfhrpl": { + "top_data_sets": [] + } + } + + +def test_process_args_with_missing_overrides_no_template(): + module_args = { + "region_data_sets": { + 'dfhauxt': {DSN: "TEST.CICSPY1.RDEV.TRACE1"}, + 'dfhbuxt': {DSN: "TEST.CICSPY1.RDEV.TRACE2"}, + }, + 
"cics_data_sets": { + "sdfhload": "TEST.CICS.LOAD", + }, + "le_data_sets": { + "sceecics": "TEST.LE.CICS", + }, + } + templar = get_templar(module_args) + task_vars = module_args + try: + _process_module_args(module_args, templar, task_vars) + except KeyError as e: + assert e.args[0] == "No template or library override found for sdfhauth" + else: + assert False + + +def test_process_args_with_one_missing_override_no_template(): + module_args = { + "region_data_sets": { + "template": "TEST.CICSPY1.RDEV.<< data_set_name >>" + }, + "cics_data_sets": { + "template": "TEST.CICS.<< lib_name >>" + }, + "le_data_sets": { + "sceecics": "TEST.LE.CICS", + "sceerun2": "TEST.LE.RUN2", + } + } + templar = get_templar(module_args) + task_vars = module_args + try: + _process_module_args(module_args, templar, task_vars) + except KeyError as e: + assert e.args[0] == "No template or library override found for sceerun" + else: + assert False + + +def test_process_args_with_missing_region_data_sets(): + module_args = { + "cics_data_sets": { + "template": "TEST.CICS.<< lib_name >>" + }, + "le_data_sets": { + "template": "TEST.LE.<< lib_name >>" + } + } + templar = get_templar(module_args) + task_vars = module_args + try: + _process_module_args(module_args, templar, task_vars) + except KeyError as e: + assert e.args[0] == "Required argument region_data_sets not found" + else: + assert False + + +def test_process_args_with_missing_dsn_key(): + module_args = { + "region_data_sets": { + 'dfhauxt': {DSN: "TEST.CICSPY1.RDEV.TRACE1"}, + 'dfhbuxt': {DSN: "TEST.CICSPY1.RDEV.TRACE2"}, + 'dfhcsd': {DSN: "TEST.CICSPY1.RDEV.CSD"}, + 'dfhintra': {DSN: "TEST.CICSPY1.RDEV.INTRA"}, + 'dfhlcd': {DSN: "TEST.CICSPY1.RDEV.LCD"}, + 'dfhlrq': {DSN: "TEST.CICSPY1.RDEV.LRQ"}, + 'dfhtemp': {DSN: "TEST.CICSPY1.RDEV.TEMP"}, + 'dfhdmpa': {DSN: "TEST.CICSPY1.RDEV.DUMPA"}, + 'dfhdmpb': {DSN: "TEST.CICSPY1.RDEV.DUMPB"}, + 'dfhstart': {DSN: "TEST.CICSPY1.RDEV.START"}, + "dfhgcd": {"garbage": "more.garbage"} + }, + 
"cics_data_sets": { + "template": "TEST.CICS.<< lib_name >>" + }, + "le_data_sets": { + "template": "TEST.LE.<< lib_name >>" + } + } + templar = get_templar(module_args) + task_vars = module_args + try: + _process_module_args(module_args, templar, task_vars) + except KeyError as e: + assert e.args[0] == "No template or data set override found for dfhgcd" + else: + assert False + + +def test_process_args_with_missing_dsn_value(): + module_args = { + "region_data_sets": { + 'dfhauxt': {DSN: "TEST.CICSPY1.RDEV.TRACE1"}, + 'dfhbuxt': {DSN: "TEST.CICSPY1.RDEV.TRACE2"}, + 'dfhcsd': {DSN: "TEST.CICSPY1.RDEV.CSD"}, + 'dfhintra': {DSN: "TEST.CICSPY1.RDEV.INTRA"}, + 'dfhlcd': {DSN: "TEST.CICSPY1.RDEV.LCD"}, + 'dfhlrq': {DSN: "TEST.CICSPY1.RDEV.LRQ"}, + 'dfhtemp': {DSN: "TEST.CICSPY1.RDEV.TEMP"}, + 'dfhdmpa': {DSN: "TEST.CICSPY1.RDEV.DUMPA"}, + 'dfhdmpb': {DSN: "TEST.CICSPY1.RDEV.DUMPB"}, + 'dfhstart': {DSN: "TEST.CICSPY1.RDEV.START"}, + "dfhgcd": {"dsn": None} + }, + "cics_data_sets": { + "template": "TEST.CICS.<< lib_name >>" + }, + "le_data_sets": { + "template": "TEST.LE.<< lib_name >>" + } + } + templar = get_templar(module_args) + task_vars = module_args + try: + _process_module_args(module_args, templar, task_vars) + except KeyError as e: + assert e.args[0] == "No template or data set override found for dfhgcd" + else: + assert False + + +def test_process_args_with_only_template_and_optional_cpsm_arg(): + module_args = { + "region_data_sets": {"template": "TEST.CICSPY1.RDEV.<< data_set_name >>"}, + "cics_data_sets": {"template": "TEST.CICS.<< lib_name >>"}, + "le_data_sets": {"template": "TEST.LE.<< lib_name >>"}, + "cpsm_data_sets": {"template": "TEST.CPSM.<< lib_name >>"} + } + templar = get_templar(module_args) + task_vars = module_args + _process_module_args(module_args, templar, task_vars) + assert module_args == { + "region_data_sets": { + 'dfhauxt': {DSN: "TEST.CICSPY1.RDEV.DFHAUXT"}, + 'dfhbuxt': {DSN: "TEST.CICSPY1.RDEV.DFHBUXT"}, + 'dfhcsd': {DSN: 
"TEST.CICSPY1.RDEV.DFHCSD"}, + 'dfhgcd': {DSN: "TEST.CICSPY1.RDEV.DFHGCD"}, + 'dfhintra': {DSN: "TEST.CICSPY1.RDEV.DFHINTRA"}, + 'dfhlcd': {DSN: "TEST.CICSPY1.RDEV.DFHLCD"}, + 'dfhlrq': {DSN: "TEST.CICSPY1.RDEV.DFHLRQ"}, + 'dfhtemp': {DSN: "TEST.CICSPY1.RDEV.DFHTEMP"}, + 'dfhdmpa': {DSN: "TEST.CICSPY1.RDEV.DFHDMPA"}, + 'dfhdmpb': {DSN: "TEST.CICSPY1.RDEV.DFHDMPB"}, + 'dfhstart': {DSN: "TEST.CICSPY1.RDEV.DFHSTART"}, + }, + "cics_data_sets": { + "sdfhload": "TEST.CICS.SDFHLOAD", + "sdfhauth": "TEST.CICS.SDFHAUTH", + "sdfhlic": "TEST.CICS.SDFHLIC", + "template": "TEST.CICS.<< lib_name >>" + }, + "le_data_sets": { + "sceecics": "TEST.LE.SCEECICS", + "sceerun": "TEST.LE.SCEERUN", + "sceerun2": "TEST.LE.SCEERUN2", + "template": "TEST.LE.<< lib_name >>" + }, + "cpsm_data_sets": { + "seyuauth": "TEST.CPSM.SEYUAUTH", + "seyuload": "TEST.CPSM.SEYULOAD", + "template": "TEST.CPSM.<< lib_name >>" + }, + "steplib": { + "top_data_sets": [] + }, + "dfhrpl": { + "top_data_sets": [] + } + } + + +def test_process_args_with_optional_cpsm_arg_and_overrides(): + module_args = { + "region_data_sets": {"template": "TEST.CICSPY1.RDEV.<< data_set_name >>"}, + "cics_data_sets": {"template": "TEST.CICS.<< lib_name >>"}, + "le_data_sets": {"template": "TEST.LE.<< lib_name >>"}, + "cpsm_data_sets": { + "template": "TEST.CPSM.<< lib_name >>", + "seyuauth": "TEST.SEYUAUTH", + "seyuload": "TEST.SEYULOAD" + } + } + templar = get_templar(module_args) + task_vars = module_args + _process_module_args(module_args, templar, task_vars) + assert module_args == { + "region_data_sets": { + 'dfhauxt': {DSN: "TEST.CICSPY1.RDEV.DFHAUXT"}, + 'dfhbuxt': {DSN: "TEST.CICSPY1.RDEV.DFHBUXT"}, + 'dfhcsd': {DSN: "TEST.CICSPY1.RDEV.DFHCSD"}, + 'dfhgcd': {DSN: "TEST.CICSPY1.RDEV.DFHGCD"}, + 'dfhintra': {DSN: "TEST.CICSPY1.RDEV.DFHINTRA"}, + 'dfhlcd': {DSN: "TEST.CICSPY1.RDEV.DFHLCD"}, + 'dfhlrq': {DSN: "TEST.CICSPY1.RDEV.DFHLRQ"}, + 'dfhtemp': {DSN: "TEST.CICSPY1.RDEV.DFHTEMP"}, + 'dfhdmpa': {DSN: 
"TEST.CICSPY1.RDEV.DFHDMPA"}, + 'dfhdmpb': {DSN: "TEST.CICSPY1.RDEV.DFHDMPB"}, + 'dfhstart': {DSN: "TEST.CICSPY1.RDEV.DFHSTART"}, + }, + "cics_data_sets": { + "sdfhload": "TEST.CICS.SDFHLOAD", + "sdfhauth": "TEST.CICS.SDFHAUTH", + "sdfhlic": "TEST.CICS.SDFHLIC", + "template": "TEST.CICS.<< lib_name >>" + }, + "le_data_sets": { + "sceecics": "TEST.LE.SCEECICS", + "sceerun": "TEST.LE.SCEERUN", + "sceerun2": "TEST.LE.SCEERUN2", + "template": "TEST.LE.<< lib_name >>" + }, + "cpsm_data_sets": { + "seyuauth": "TEST.SEYUAUTH", + "seyuload": "TEST.SEYULOAD", + "template": "TEST.CPSM.<< lib_name >>" + }, + "steplib": { + "top_data_sets": [] + }, + "dfhrpl": { + "top_data_sets": [] + } + } diff --git a/tests/unit/action/test_stop_region.py b/tests/unit/action/test_stop_region.py new file mode 100644 index 00000000..50b2b09f --- /dev/null +++ b/tests/unit/action/test_stop_region.py @@ -0,0 +1,330 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import absolute_import, division, print_function +from datetime import datetime, timedelta + +__metaclass__ = type + +from ansible_collections.ibm.ibm_zos_cics.tests.unit.helpers.stop_action_helper import ( + get_operator_shutdown_response, + get_tso_status_response, + get_job_query_result, + CONSOLE_AUTOINSTALL_FAIL, + CONSOLE_UNDEFINED, +) + +# Required for mocking of datetime import in this file +import ansible_collections.ibm.ibm_zos_cics.plugins.action.stop_region as stop_region_action +from ansible_collections.ibm.ibm_zos_cics.plugins.action.stop_region import ( + get_console_errors, + calculate_end_time, + format_cancel_command, + format_shutdown_command, + _get_job_info_from_status, + _get_job_name_from_query, + _get_job_status_name_id, +) +from ansible.errors import AnsibleActionFail + +try: + from unittest.mock import MagicMock +except ImportError: + from mock import MagicMock +import pytest + + +def 
test_calculate_end_time(): + now = datetime.now() + timeout_seconds = 10 + stop_region_action.get_datetime_now = MagicMock(return_value=now) + + assert calculate_end_time(timeout_seconds) == now + \ + timedelta(0, timeout_seconds) + + +def test_format_cancel_command(): + job_name = "LINKJOB" + job_id = "JOB12345" + + cmd = format_cancel_command(job_name, job_id) + assert cmd == "jcan C LINKJOB JOB12345" + + +def test_format_shutdown_command_immediate(): + job_name = "LINKJOB" + mode = "immediate" + + cmd = format_shutdown_command(job_name, mode) + assert cmd == "MODIFY LINKJOB,CEMT PERFORM SHUTDOWN IMMEDIATE" + + +def test_format_shutdown_command_normal(): + job_name = "LINKJOB" + mode = "normal" + + cmd = format_shutdown_command(job_name, mode) + assert cmd == "MODIFY LINKJOB,CEMT PERFORM SHUTDOWN" + + +def test_format_shutdown_command_sd(): + job_name = "LINKJOB" + mode = "normal" + + cmd = format_shutdown_command(job_name, mode, sdtran="DEFG") + assert cmd == "MODIFY LINKJOB,CEMT PERFORM SHUTDOWN SDTRAN(DEFG)" + + +def test_format_shutdown_command_nosd(): + job_name = "LINKJOB" + mode = "normal" + + cmd = format_shutdown_command(job_name, mode, no_sdtran=True) + assert cmd == "MODIFY LINKJOB,CEMT PERFORM SHUTDOWN NOSDTRAN" + + +def test_console_error_valid(): + shutdown_result = get_operator_shutdown_response() + try: + # Assert void method does not error + get_console_errors(shutdown_result) + assert True + except Exception as e: + assert False, "'get_console_errors' raised exception {0}".format( + str(e)) + + +def test_console_error_undefined(): + shutdown_result = get_operator_shutdown_response(console=CONSOLE_UNDEFINED) + + with pytest.raises(AnsibleActionFail) as action_err: + get_console_errors(shutdown_result) + assert "Shutdown command failed because the console used was not defined" in str( + action_err + ) + + +def test_console_error_install(): + shutdown_result = get_operator_shutdown_response( + console=CONSOLE_AUTOINSTALL_FAIL) + + with 
pytest.raises(AnsibleActionFail) as action_err: + get_console_errors(shutdown_result) + assert ( + "Shutdown command failed because the auto-install of the console was unsuccessful" + in str(action_err) + ) + + +def test_get_job_info_from_status_1_running(): + job_name = "JOBNAM" + job_id = "JOB12345" + tso_query_response = get_tso_status_response( + jobname=job_name, running_job_id=job_id, stopped=0 + ) + + assert _get_job_info_from_status(tso_query_response, job_name) == [ + { + "job_name": job_name, + "job_id": job_id, + "status": "EXECUTING", + } + ] + + +def test_get_job_info_from_status_0_running(): + job_name = "JOBNAM" + tso_query_response = get_tso_status_response( + jobname=job_name, stopped=0, running=0) + assert _get_job_info_from_status(tso_query_response, job_name) == [] + + +def test_get_job_info_from_status_2_running(): + job_name = "JOBNAM" + job_id = "JOB12345" + tso_query_response = get_tso_status_response( + jobname=job_name, running_job_id=job_id, stopped=0, running=2 + ) + + assert _get_job_info_from_status(tso_query_response, job_name) == [ + { + "job_name": job_name, + "job_id": job_id, + "status": "EXECUTING", + }, + { + "job_name": job_name, + "job_id": job_id, + "status": "EXECUTING", + }, + ] + + +def test_get_job_info_from_status_0_running_1_stopped(): + job_name = "JOBNAM" + job_id = "JOB12345" + tso_query_response = get_tso_status_response( + jobname=job_name, stopped_job_id=job_id, running=0 + ) + + assert _get_job_info_from_status(tso_query_response, job_name) == [ + { + "job_name": job_name, + "job_id": job_id, + "status": "ON OUTPUT QUEUE", + }, + ] + + +def test_get_job_info_from_status_0_running_2_stopped(): + job_name = "JOBNAM" + job_id = "JOB12345" + tso_query_response = get_tso_status_response( + jobname=job_name, stopped_job_id=job_id, running=0, stopped=2 + ) + + assert _get_job_info_from_status(tso_query_response, job_name) == [ + { + "job_name": job_name, + "job_id": job_id, + "status": "ON OUTPUT QUEUE", + }, + { + 
"job_name": job_name, + "job_id": job_id, + "status": "ON OUTPUT QUEUE", + }, + ] + + +def test_get_job_info_from_status_1_running_2_stopped(): + job_name = "JOBNAM" + job_id = "JOB12345" + stopped_id = "JOB98765" + tso_query_response = get_tso_status_response( + jobname=job_name, running_job_id=job_id, stopped_job_id=stopped_id, stopped=2 + ) + + assert _get_job_info_from_status(tso_query_response, job_name) == [ + { + "job_name": job_name, + "job_id": job_id, + "status": "EXECUTING", + }, + { + "job_name": job_name, + "job_id": stopped_id, + "status": "ON OUTPUT QUEUE", + }, + { + "job_name": job_name, + "job_id": stopped_id, + "status": "ON OUTPUT QUEUE", + }, + ] + + +def test_get_job_name_from_query(): + job_name = "JOBNAM" + job_id = "JOB12345" + job_query_response = get_job_query_result(jobname=job_name) + + assert _get_job_name_from_query(job_query_response, job_id) == job_name + + +def test_get_job_name_from_query_failed(): + job_name = "JOBNAM" + job_id = "JOB12345" + job_query_response = get_job_query_result(jobname=job_name, failed=True) + + with pytest.raises(AnsibleActionFail) as action_err: + _get_job_name_from_query(job_query_response, job_id) + assert "Job query failed - (No failure message provided by zos_job_query)" in str( + action_err + ) + + +def test_get_job_name_from_query_failed_msg(): + job_name = "JOBNAM" + job_id = "JOB12345" + job_query_response = get_job_query_result( + jobname=job_name, failed=True, message="MEANINGFUL MSG FROM CORE" + ) + + with pytest.raises(AnsibleActionFail) as action_err: + _get_job_name_from_query(job_query_response, job_id) + assert "Job query failed - MEANINGFUL MSG FROM CORE" in str(action_err) + + +def test_get_job_name_from_query_0_jobs(): + job_name = "JOBNAM" + job_id = "JOB12345" + job_query_response = get_job_query_result(jobname=job_name, jobs=0) + + with pytest.raises(AnsibleActionFail) as action_err: + _get_job_name_from_query(job_query_response, job_id) + assert "No jobs found with id 
{0}".format(job_id) in str(action_err) + + +def test_get_job_name_from_query_missing_jobs(): + job_name = "JOBNAM" + job_id = "JOB12345" + job_query_response = get_job_query_result( + jobname=job_name, no_jobs_found=True) + + with pytest.raises(AnsibleActionFail) as action_err: + _get_job_name_from_query(job_query_response, job_id) + assert "No jobs found with id {0}".format(job_id) in str(action_err) + + +def test_get_job_name_from_query_multiple_jobs(): + job_name = "JOBNAM" + job_id = "JOB12345" + job_query_response = get_job_query_result(jobname=job_name, jobs=2) + + with pytest.raises(AnsibleActionFail) as action_err: + _get_job_name_from_query(job_query_response, job_id) + assert "Multiple jobs found with ID {0}".format(job_id) in str(action_err) + + +def test_get_job_status_name_id(): + job_name = "JOBNAM" + job_id = "JOB12345" + stopped_id = "JOB98765" + tso_query_response = get_tso_status_response( + jobname=job_name, running_job_id=job_id, stopped_job_id=stopped_id + ) + assert _get_job_status_name_id( + tso_query_response, job_name, job_id) == "EXECUTING" + + +def test_get_job_status_name_id_no_output(): + job_name = "JOBNAM" + job_id = "JOB12345" + with pytest.raises(AnsibleActionFail) as action_err: + _get_job_status_name_id({}, job_name, job_id) + assert "Output not received for TSO STATUS command" in str(action_err) + + +def test_get_job_status_name_id_0_jobs(): + job_name = "JOBNAM" + job_id = "JOB12345" + tso_query_response = get_tso_status_response( + jobname=job_name, running=0, stopped=0) + with pytest.raises(AnsibleActionFail) as action_err: + _get_job_status_name_id(tso_query_response, job_name, job_id) + assert "No jobs found with name {0} and ID {1}".format(job_name, job_id) in str( + action_err + ) + + +def test_get_job_status_name_id_2_jobs(): + job_name = "JOBNAM" + job_id = "JOB12345" + tso_query_response = get_tso_status_response( + jobname=job_name, running=2, stopped=0) + with pytest.raises(AnsibleActionFail) as action_err: + 
_get_job_status_name_id(tso_query_response, job_name, job_id) + assert "Multiple jobs with name and ID found" in str(action_err) diff --git a/tests/unit/helpers/cmci_helper.py b/tests/unit/helpers/cmci_helper.py index 2f002743..29c6487b 100644 --- a/tests/unit/helpers/cmci_helper.py +++ b/tests/unit/helpers/cmci_helper.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# (c) Copyright IBM Corp. 2020,2021 +# (c) Copyright IBM Corp. 2020,2023 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) from __future__ import absolute_import, division, print_function diff --git a/tests/unit/helpers/data_set_helper.py b/tests/unit/helpers/data_set_helper.py new file mode 100644 index 00000000..ce1cb612 --- /dev/null +++ b/tests/unit/helpers/data_set_helper.py @@ -0,0 +1,513 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) +import json +from textwrap import dedent +from ansible.module_utils.common.text.converters import to_bytes +from ansible.module_utils import basic + +PYTHON_LANGUAGE_FEATURES_MESSAGE = "Requires python 3 language features" + +default_data_set = { + "exists": False, + "name": None, + "size": { + "primary": 5, + "secondary": 1, + "unit": "M" + }, + "state": "initial", + "vsam": False +} + + +def set_module_args(args): + basic._ANSIBLE_ARGS = to_bytes(json.dumps({'ANSIBLE_MODULE_ARGS': args})) + + +def LISTDS_run_name(run): + return "IKJEFT01 - Get Data Set Status - Run {0}".format(run) + + +def LISTDS_data_set_doesnt_exist(data_set_name): + return """ + 1READY + LISTDS '{0}' + {0} + DATA SET '{0}' NOT IN CATALOG + READY + END + """.format(data_set_name) + + +def LISTDS_member_doesnt_exist(base_data_set_name, member_name): + return """ + 1READY + LISTDS '{0}({1})' + {0} + --RECFM-LRECL-BLKSIZE-DSORG + FB 80 27920 PO + --VOLUMES-- + P2P117 + DIRECTORY INFORMATION NOT AVAILABLE+ + MEMBER NAME NOT FOUND + READY + END + """.format(base_data_set_name, 
member_name) + + +def LISTDS_data_set(data_set_name, dsorg): + return """ + 1READY + LISTDS '{0}' + {0} + --LRECL--DSORG- + ** {1} + --VOLUMES-BLKSIZE + ** + READY + END + """.format(data_set_name, dsorg) + + +def LISTSDS_member_data_set(base_data_set_name, member_name): + return """ + 1READY + LISTDS '{0}({1})' + {0} + --RECFM-LRECL-BLKSIZE-DSORG + FB 80 27920 PO + --VOLUMES-- + P2P117 + --MEMBER---TTR----ALIAS-TTRN-CNT-DATA + {1} 000110 NO 0 00 + READY + END + """.format(base_data_set_name, member_name) + + +def IDCAMS_run_cmd(data_set_name): + return """ + DEFINE CLUSTER - + (NAME({0}) - + INDEXED - + MEGABYTES(5 1) - + SHR(2) - + FREESPACE(10 10) - + RECORDSIZE(4089 32760) - + REUSE) - + DATA - + (NAME({0}.DATA) - + CONTROLINTERVALSIZE(32768) - + KEYS(52 0)) - + INDEX - + (NAME({0}.INDEX)) + """.format(data_set_name) + + +def IDCAMS_create_run_name(run, data_set_name): + return "IDCAMS - Creating {0} data set - Run {1}".format(data_set_name, run) + + +def IDCAMS_delete_run_name(run, data_set_name): + return "IDCAMS - {0} - Run {1}".format(data_set_name, run) + + +def IDCAMS_create_stdout(data_set_name): + return """ + 1IDCAMS SYSTEM SERVICES TIME: 10:04:57 + 06/29/23 PAGE 1 + 0 + DEFINE CLUSTER - + (NAME({0}) + 0IDC0508I DATA ALLOCATION STATUS FOR VOLUME P2P0D5 IS 0 + 0IDC0509I INDEX ALLOCATION STATUS FOR VOLUME P2P0D5 IS 0 + IDC0181I STORAGECLASS USED IS STANDARD + IDC0181I MANAGEMENTCLASS USED IS STANDARD + 0IDC0001I FUNCTION COMPLETED, HIGHEST CONDITION CODE WAS 0 + 0 + + 0IDC0002I IDCAMS PROCESSING COMPLETE. 
MAXIMUM CONDITION CODE WAS 0 + """.format(data_set_name) + + +def IDCAMS_create_already_exists_stdout(data_set_name): + return """ + 1IDCAMS SYSTEM SERVICES TIME: 10:04:51 + 06/29/23 PAGE 1 + 0 + DEFINE CLUSTER - + (NAME({0}) + 0IGD17101I DATA SET {0} + NOT DEFINED BECAUSE DUPLICATE NAME EXISTS IN CATALOG + RETURN CODE IS 8 REASON CODE IS 38 IGG0CLEH + IGD17219I UNABLE TO CONTINUE DEFINE OF DATA SET + {0} + 0IDC3013I DUPLICATE DATA SET NAME + IDC3009I ** VSAM CATALOG RETURN CODE IS 8 - REASON CODE IS IGG0CLEH-38 + 0IDC3003I FUNCTION TERMINATED. CONDITION CODE IS 12 + 0 + + 0IDC0002I IDCAMS PROCESSING COMPLETE. MAXIMUM CONDITION CODE WAS 12 + """.format(data_set_name) + + +def IDCAMS_delete(data_set_name): + return """ + 1IDCAMS SYSTEM SERVICES TIME: 18:54:07 01/29/24 PAGE 1 + 0 + DELETE {0} + 0IDC0550I ENTRY (D) {0}.DATA DELETED + 0IDC0550I ENTRY (I) {0}.INDEX DELETED + 0IDC0550I ENTRY (C) {0} DELETED + 0IDC0001I FUNCTION COMPLETED, HIGHEST CONDITION CODE WAS 0 + 0 + + 0IDC0002I IDCAMS PROCESSING COMPLETE. MAXIMUM CONDITION CODE WAS 0 + """.format(data_set_name) + + +def IDCAMS_delete_not_found(data_set_name): + return """ + 1IDCAMS SYSTEM SERVICES TIME: 10:15:24 + 06/29/23 PAGE 1 + 0 + DELETE {0} + 0IDC3012I ENTRY {0} NOT FOUND + IDC3009I ** VSAM CATALOG RETURN CODE IS 8 - REASON CODE IS IGG0CLEG-42 + IDC0551I ** ENTRY {0} NOT DELETED + 0IDC0001I FUNCTION COMPLETED, HIGHEST CONDITION CODE WAS 8 + 0 + + 0IDC0002I IDCAMS PROCESSING COMPLETE. 
MAXIMUM CONDITION CODE WAS 8 + """.format(data_set_name) + + +def IEFBR14_create_stderr(data_set_name, dd_name): + return """ + BGYSC0307I Program: Arguments: <> + BGYSC0308I DDNames: + BGYSC0312I {1}={0} + BGYSC0303I Dataset allocation succeeded for {1}={0} + BGYSC0328I OS Load program IEFBR14 + BGYSC0320I Addressing mode: AMODE24 + BGYSC0327I Attach Exit code: 0 from IEFBR14 + BGYSC0338I Dataset free succeeded for {1}={0} + """.format(data_set_name, dd_name) + + +def IEFBR14_get_run_name(run): + return "IEFBR14 - DFHIEFT - Run {0}".format(run) + + +def ICETOOL_name(count): + return "ICETOOL - Get record count - Run {0}".format(count) + + +def ICETOOL_stdout(count): + return """ + 1ICE200I 0 IDENTIFIER FROM CALLING PROGRAM IS 0001 + ICE201I C RECORD TYPE IS F - DATA STARTS IN POSITION 1 + ICE751I 0 C5-I79518 C6-I90068 C7-I76949 C8-I75151 EE-I76949 E9-I77769 C9-NONE E5-I92416 E7-I76949 + ICE143I 0 BLOCKSET COPY TECHNIQUE SELECTED + ICE250I 0 VISIT http://www.ibm.com/storage/dfsort FOR DFSORT PAPERS, EXAMPLES AND MORE + ICE000I 0 - CONTROL STATEMENTS FOR 5650-ZOS, Z/OS DFSORT V2R4 - 18:44 ON MON JAN 29, 2024 - + 0 DEBUG NOABEND,ESTAE + OPTION MSGDDN=DFSMSG,LIST,MSGPRT=ALL,RESINV=0,SORTIN=DD1,COPY,NOCHECK + MODS E35=(ICE35DU,12288) + ICE193I 0 ICEAM2 INVOCATION ENVIRONMENT IN EFFECT - ICEAM2 ENVIRONMENT SELECTED + ICE088I 0 HUGHEA8 .STEP1 . 
, INPUT LRECL = 2041, BLKSIZE = 2048, TYPE = F + ICE093I 0 MAIN STORAGE = (MAX,6291456,6291456) + ICE156I 0 MAIN STORAGE ABOVE 16MB = (6250480,6234096) + ICE127I 0 OPTIONS: OVFLO=RC0 ,PAD=RC0 ,TRUNC=RC0 ,SPANINC=RC16,VLSCMP=N,SZERO=Y,RESET=Y,VSAMEMT=Y,DYNSPC=256 + ICE128I 0 OPTIONS: SIZE=6291456,MAXLIM=1048576,MINLIM=450560,EQUALS=N,LIST=Y,ERET=RC16 ,MSGDDN=DFSMSG + ICE129I 0 OPTIONS: VIO=N,RESDNT=ALL ,SMF=NO ,WRKSEC=Y,OUTSEC=Y,VERIFY=N,CHALT=N,DYNALOC=N ,ABCODE=MSG + ICE130I 0 OPTIONS: RESALL=4096,RESINV=0,SVC=109 ,CHECK=N,WRKREL=Y,OUTREL=Y,CKPT=N,COBEXIT=COB2,ZSORT=N + ICE131I 0 OPTIONS: TMAXLIM=6291456,ARESALL=0,ARESINV=0,OVERRGN=16384,CINV=Y,CFW=Y,DSA=0 + ICE132I 0 OPTIONS: VLSHRT=N,ZDPRINT=Y,IEXIT=N,TEXIT=N,LISTX=N,EFS=NONE ,EXITCK=S,PARMDDN=DFSPARM ,FSZEST=N + ICE133I 0 OPTIONS: HIPRMAX=OPTIMAL,DSPSIZE=MAX ,ODMAXBF=0,SOLRF=Y,VLLONG=N,VSAMIO=N,MOSIZE=MAX + ICE235I 0 OPTIONS: NULLOUT=RC0 + ICE236I 0 OPTIONS: DYNAPCT=10 ,MOWRK=Y,TUNE=STOR,EXPMAX=MAX ,EXPOLD=50% ,EXPRES=10% + ICE084I 1 VSAM ACCESS METHOD USED FOR DD1 + ICE751I 1 EF-I80637 F0-I90068 E8-I76949 + ICE091I 0 OUTPUT LRECL = 2041, TYPE = F + ICE055I 0 INSERT 0, DELETE 52 + ICE054I 0 RECORDS - IN: 52, OUT: 0 + ICE267I 0 ZSORT ACCELERATOR PATH NOT USED RSN=193 + ICE052I 0 END OF DFSORT + 1ICE600I 0 DFSORT ICETOOL UTILITY RUN STARTED + + ICE650I 0 VISIT http://www.ibm.com/storage/dfsort FOR ICETOOL PAPERS, EXAMPLES AND MORE + + ICE632I 0 SOURCE FOR ICETOOL STATEMENTS: TOOLIN + + + ICE630I 0 MODE IN EFFECT: STOP + + COUNT FROM(DD1) + ICE627I 0 DFSORT CALL 0001 FOR COPY FROM DD1 TO E35 EXIT COMPLETED + ICE628I 0 RECORD COUNT: 0000000000000{0} + ICE602I 0 OPERATION RETURN CODE: 00 + + + ICE601I 0 DFSORT ICETOOL UTILITY RUN ENDED - RETURN CODE: 00" + """.format(count) + + +def ICETOOL_stderr(): + return """ + BGYSC0307I Program: Arguments: <> + BGYSC0308I DDNames: + BGYSC0312I TOOLIN=DATA.SET.PATH + BGYSC0310I SHOWDEF=* + BGYSC0310I DFSMSG=* + BGYSC0310I TOOLMSG=* + BGYSC0312I DD1=DATA.SET.PATH + BGYSC0310I 
SYSPRINT=* + BGYSC0303I Dataset allocation succeeded for TOOLIN=DATA.SET.PATH + BGYSC0304I Dynamic allocation succeeded for SHOWDEF (temporary dataset for console) + BGYSC0304I Dynamic allocation succeeded for DFSMSG (temporary dataset for console) + BGYSC0304I Dynamic allocation succeeded for TOOLMSG (temporary dataset for console) + BGYSC0303I Dataset allocation succeeded for DD1=DATA.SET.PATH + BGYSC0304I Dynamic allocation succeeded for SYSPRINT (temporary dataset for console) + BGYSC0328I OS Load program ICETOOL + BGYSC0320I Addressing mode: AMODE31 + BGYSC0327I Attach Exit code: 0 from ICETOOL + BGYSC0338I Dataset free succeeded for TOOLIN=DATA.SET.PATH + BGYSC0356I Console free succeeded for SHOWDEF + BGYSC0356I Console free succeeded for DFSMSG + BGYSC0356I Console free succeeded for TOOLMSG + BGYSC0338I Dataset free succeeded for DD1=DATA.SET.PATH + BGYSC0356I Console free succeeded for SYSPRINT + """ + + +def RMUTL_get_run_name(run): + return "DFHRMUTL - Get current catalog - Run {0}".format(run) + + +def RMUTL_update_run_name(run): + return "DFHRMUTL - Updating autostart override - Run {0}".format(run) + + +def RMUTL_stdout(auto_start, next_start): + return """ + ===DFHRMUTL CICS RECOVERY MANAGER BATCH UTILITY=== + + ---DFHRMUTL: DFHGCD information + No recovery manager record found. GCD assumed empty. + + ---DFHRMUTL: DFHGCD updated information + Recovery manager auto-start override : {0} + Recovery manager next start type : {1} + + Note: a CICS system that was shutdown warm, and which + has no indoubt, commit-failed or backout-failed Units + Of Work keypointed at that time, can safely be restarted + cold without loss of data integrity. 
+ """.format(auto_start, next_start) + + +def RMUTL_stderr(data_set_name): + return """ + BGYSC0345I STEPLIB set to TEST.CICS.INSTALL.SDFHLOAD + BGYSC0346I Nested invocation <'mvscmdhelper' '-v' '--pgm=DFHRMUTL' '--dfhgcd={0}' '--sysin=TMP.P3621188.T0403704.C0000000' '--sysprint=*'> + BGYSC0307I Program: Arguments: <> + BGYSC0308I DDNames: + BGYSC0310I SYSPRINT=* + BGYSC0312I SYSIN=TMP.P3621188.T0403704.C0000000 + BGYSC0312I DFHGCD={0} + BGYSC0304I Dynamic allocation succeeded for SYSPRINT (temporary dataset for console) + BGYSC0303I Dataset allocation succeeded for SYSIN=TMP.P3621188.T0403704.C0000000 + BGYSC0303I Dataset allocation succeeded for DFHGCD={0} + BGYSC0328I OS Load program DFHRMUTL + BGYSC0320I Addressing mode: AMODE31 + BGYSC0327I Attach Exit code: 0 from DFHRMUTL + BGYSC0356I Console free succeeded for SYSPRINT + BGYSC0338I Dataset free succeeded for SYSIN=TMP.P3621188.T0403704.C0000000 + BGYSC0338I Dataset free succeeded for DFHGCD={0} + """.format(data_set_name) + + +def CCUTL_name(): + return "DFHCCUTL - Initialise Local Catalog" + + +def CCUTL_stderr(data_set_name): + return """ + BGYSC0345I STEPLIB set to TEST.CICS.INSTALL.SDFHLOAD + BGYSC0346I Nested invocation <'mvscmdhelper' '-v' '--pgm=DFHCCUTL' '--sysprint=*' '--sysudump=*' '--dfhlcd={0},SHR'> + BGYSC0307I Program: Arguments: <> + BGYSC0308I DDNames: + BGYSC0312I DFHLCD={0} + BGYSC0310I SYSUDUMP=* + BGYSC0310I SYSPRINT=* + BGYSC0303I Dataset allocation succeeded for DFHLCD={0} + BGYSC0304I Dynamic allocation succeeded for SYSUDUMP (temporary dataset for console) + BGYSC0304I Dynamic allocation succeeded for SYSPRINT (temporary dataset for console) + BGYSC0328I OS Load program DFHCCUTL + BGYSC0320I Addressing mode: AMODE24 + BGYSC0327I Attach Exit code: 0 from DFHCCUTL + BGYSC0338I Dataset free succeeded for DFHLCD={0} + BGYSC0356I Console free succeeded for SYSUDUMP + BGYSC0356I Console free succeeded for SYSPRINT + """.format(data_set_name) + + +def CSDUP_name(): + return "Run DFHCSDUP" + 
+ +def CSDUP_initialize_stdout(data_set_name): + return """ + *************************************************************************** + ** CICS RDO OFF-LINE UTILITY PROGRAM DFHCSDUP RELEASE:0750 PTF:I0602193.** + *************************************************************************** + + + INITIALIZE + + DFH5120 I PRIMARY CSD OPENED; DDNAME: DFHCSD - DSNAME: {0} + DFH5280 I PROCESSING DEFINITIONS FROM LIBRARY MEMBER DFHCURDI + DFH5131 I LIST DFHLIST CREATED. + DFH5135 I GROUP DFHDCTG ADDED TO LIST + """.format(data_set_name) + + +def CSDUP_stderr(data_set_name): + return """ + BGYSC0345I STEPLIB set to TEST.CICS.INSTALL.SDFHLOAD + BGYSC0346I Nested invocation <'mvscmdhelper' '-v' '--pgm=DFHCSDUP' '--dfhcsd={0},SHR' '--sysprint=*' '--sysudump=*' '--sysin={0}'> + BGYSC0307I Program: Arguments: <> + BGYSC0308I DDNames: + BGYSC0312I SYSIN={0} + BGYSC0310I SYSUDUMP=* + BGYSC0310I SYSPRINT=* + BGYSC0312I DFHCSD={0} + BGYSC0303I Dataset allocation succeeded for SYSIN={0} + BGYSC0304I Dynamic allocation succeeded for SYSUDUMP (temporary dataset for console) + BGYSC0304I Dynamic allocation succeeded for SYSPRINT (temporary dataset for console) + BGYSC0303I Dataset allocation succeeded for DFHCSD={0} + BGYSC0328I OS Load program DFHCSDUP + BGYSC0320I Addressing mode: AMODE24 + BGYSC0327I Attach Exit code: 0 from DFHCSDUP + BGYSC0338I Dataset free succeeded for SYSIN={0} + BGYSC0356I Console free succeeded for SYSUDUMP + BGYSC0356I Console free succeeded for SYSPRINT + BGYSC0338I Dataset free succeeded for DFHCSD={0} + """.format(data_set_name) + + +def CSDUP_add_group_stdout(data_set_name): + return """ + *************************************************************************** + ** CICS RDO OFF-LINE UTILITY PROGRAM DFHCSDUP RELEASE:0750 PTF:I1302193.** + *************************************************************************** + + ADD GROUP(DFHTERMC) LIST(DFHLIST1) + + DFH5120 I PRIMARY CSD OPENED; DDNAME: DFHCSD - DSNAME: {0} + DFH5131 I LIST DFHLIST1 CREATED. 
+ DFH5135 I GROUP DFHTERMC ADDED TO LIST DFHLIST1 + DFH5101 I ADD COMMAND EXECUTED SUCCESSFULLY. + DFH5123 I PRIMARY CSD CLOSED; DDNAME: DFHCSD - DSNAME: {0} + + DFH5107 I COMMANDS EXECUTED SUCCESSFULLY: 1 COMMANDS GIVING WARNING(S): 0 COMMANDS IN ERROR: 0 + DFH5108 I COMMANDS NOT EXECUTED AFTER ERROR(S): 0 + DFH5109 I END OF DFHCSDUP UTILITY JOB. HIGHEST RETURN CODE WAS: 0 + """.format(data_set_name) + + +def read_data_set_content_run_name(data_set_name): + return "Read data set {0}".format(data_set_name) + + +def get_sample_generated_JCL_args(data_set_name, state): + return { + "state": state, + "applid": "APPLID", + "region_data_sets": { + 'dfhauxt': {"dsn": "test.dfhauxt"}, + 'dfhbuxt': {"dsn": "test.dfhbuxt"}, + 'dfhcsd': {"dsn": "test.dfhcsd"}, + 'dfhgcd': {"dsn": "test.dfhgcd"}, + 'dfhintra': {"dsn": "test.dfhintra"}, + 'dfhlcd': {"dsn": "test.dfhlcd"}, + 'dfhlrq': {"dsn": "test.dfhlrq"}, + 'dfhtemp': {"dsn": "test.dfhtemp"}, + 'dfhdmpa': {"dsn": "test.dfhdmpa"}, + 'dfhdmpb': {"dsn": "test.dfhdmpb"}, + "dfhstart": {"dsn": data_set_name} + }, + "cics_data_sets": { + "sdfhload": "test.sdfhload", + "sdfhauth": "test.sdfhauth", + "sdfhlic": "test.sdfhlic", + }, + "le_data_sets": { + "sceecics": "test.sceecics", + "sceerun": "test.sceerun", + "sceerun2": "test.sceerun2", + }, + "cpsm_data_sets": { + "seyuauth": "test.seyuauth", + "seyuload": "test.seyuload", + }, + "steplib": { + "top_data_sets": ["some.top.lib"] + }, + "dfhrpl": { + "top_data_sets": ["another.top.lib"] + }, + "job_parameters": { + "region": "0M" + }, + "sit_parameters": { + "start": "AUTO", + "tcpip": "NO" + } + } + + +def get_sample_generated_JCL(): + return dedent(""" + //APPLID JOB REGION=0M + // EXEC PGM=DFHSIP,PARM=SI + //STEPLIB DD DSN=SOME.TOP.LIB,DISP=SHR + // DD DSN=TEST.SDFHAUTH,DISP=SHR + // DD DSN=TEST.SDFHLIC,DISP=SHR + // DD DSN=TEST.SEYUAUTH,DISP=SHR + // DD DSN=TEST.SCEERUN,DISP=SHR + // DD DSN=TEST.SCEERUN2,DISP=SHR + //DFHRPL DD DSN=ANOTHER.TOP.LIB,DISP=SHR + // DD 
DSN=TEST.SDFHLOAD,DISP=SHR + // DD DSN=TEST.SEYULOAD,DISP=SHR + // DD DSN=TEST.SCEECICS,DISP=SHR + // DD DSN=TEST.SCEERUN,DISP=SHR + // DD DSN=TEST.SCEERUN2,DISP=SHR + //DFHAUXT DD DSN=TEST.DFHAUXT,DISP=SHR + //DFHBUXT DD DSN=TEST.DFHBUXT,DISP=SHR + //DFHCSD DD DSN=TEST.DFHCSD,DISP=SHR + //DFHGCD DD DSN=TEST.DFHGCD,DISP=SHR + //DFHINTRA DD DSN=TEST.DFHINTRA,DISP=SHR + //DFHLCD DD DSN=TEST.DFHLCD,DISP=SHR + //DFHLRQ DD DSN=TEST.DFHLRQ,DISP=SHR + //DFHTEMP DD DSN=TEST.DFHTEMP,DISP=SHR + //DFHDMPA DD DSN=TEST.DFHDMPA,DISP=SHR + //DFHDMPB DD DSN=TEST.DFHDMPB,DISP=SHR + //CEEMSG DD SYSOUT=* + //CEEOUT DD SYSOUT=* + //MSGUSR DD SYSOUT=* + //SYSPRINT DD SYSOUT=* + //SYSUDUMP DD SYSOUT=* + //SYSABEND DD SYSOUT=* + //SYSOUT DD SYSOUT=* + //DFHCXRF DD SYSOUT=* + //LOGUSR DD SYSOUT=* + //SYSIN DD * + START=AUTO + TCPIP=NO + APPLID=APPLID + /* + //""").lstrip() diff --git a/tests/unit/helpers/stop_action_helper.py b/tests/unit/helpers/stop_action_helper.py new file mode 100644 index 00000000..9da71874 --- /dev/null +++ b/tests/unit/helpers/stop_action_helper.py @@ -0,0 +1,128 @@ +CONSOLE_UNDEFINED = "UNDEFINED" +CONSOLE_AUTOINSTALL_FAIL = "AUTOINSTALL" + + +def get_operator_shutdown_response(console=None): + content_msg = "MV2C 2024128 16:01:05.00 " + if console == CONSOLE_UNDEFINED: + content_msg += "+DFHAC2015 AN1234 Console ANSI0000 has not been defined to CICS. Input is ignored." + elif console == CONSOLE_AUTOINSTALL_FAIL: + content_msg += ( + "+DFHAC2032 AN1234 CICS autoinstall for console ANSI0000 has failed." 
+ ) + + return { + "changed": True, + "cmd": "MODIFY AN1234,CEMT PERFORM SHUTDOWN", + "content": [ + "MV2C 2024128 16:01:05.00 ISF031I CONSOLE ANSI0000 ACTIVATED", + "MV2C 2024128 16:01:05.00 -MODIFY AN1234,CEMT PERFORM SHUTDOWN ", + content_msg, + ], + "elapsed": 1.1, + "invocation": { + "module_args": { + "cmd": "MODIFY AN1234,CEMT PERFORM SHUTDOWN", + "verbose": False, + "wait_time_s": 1, + } + }, + "wait_time_s": 1, + } + + +def get_tso_status_response( + full_response=True, + jobname="LINKJOB", + status_line=True, + running=1, + stopped=1, + command_responses=1, + running_job_id="JOB12345", + stopped_job_id="JOB98765", +): + content = [] + if status_line: + content.append("STATUS {0}".format(jobname)) + for i in range(running): + content.append( + "IKJ56211I JOB {0}({1}) EXECUTING".format(jobname, running_job_id) + ) + for i in range(stopped): + content.append( + "IKJ56192I JOB {0}({1}) ON OUTPUT QUEUE".format(jobname, stopped_job_id) + ) + + full = {"output": []} + for i in range(command_responses): + full["output"].append( + { + "command": "STATUS LINKJOB", + "content": content, + "rc": 0, + "max_rc": 0, + "lines": 4, + "failed": False, + } + ) + + return full if full_response else content + + +def get_job_query_result( + full_response=True, + jobs=1, + jobname="AN1234", + failed=False, + message=None, + no_jobs_found=False, +): + job_list = [] + + if no_jobs_found: + job_list.append( + { + "asid": "167", + "creation_date": "2024-05-07", + "creation_time": "6:28:42", + "job_class": "A", + "job_id": "JOB12345", + "job_name": "*", + "owner": "ANSIBIT", + "priority": "7", + "program_name": "?", + "queue_position": "0", + "ret_code": {"msg": "JOB NOT FOUND"}, + "subsystem": "", + "svc_class": "BATCH", + "system": "", + } + ) + else: + for i in range(jobs): + job_list.append( + { + "asid": "167", + "creation_date": "2024-05-07", + "creation_time": "6:28:42", + "job_class": "A", + "job_id": "JOB12345", + "job_name": jobname, + "owner": "ANSIBIT", + "priority": 
"7", + "program_name": "?", + "queue_position": "0", + "ret_code": None, + "subsystem": "", + "svc_class": "BATCH", + "system": "", + } + ) + + full = { + "jobs": job_list, + "failed": failed, + } + if message is not None: + full.update({"message": message}) + return full if full_response else job_list diff --git a/tests/unit/module_utils/__init__.py b/tests/unit/module_utils/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/module_utils/test_aux_trace.py b/tests/unit/module_utils/test_aux_trace.py new file mode 100644 index 00000000..98a32697 --- /dev/null +++ b/tests/unit/module_utils/test_aux_trace.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import absolute_import, division, print_function +import pytest +import sys +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._data_set import MEGABYTES +from ansible_collections.ibm.ibm_zos_cics.tests.unit.helpers.data_set_helper import PYTHON_LANGUAGE_FEATURES_MESSAGE +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.dd_statement import DatasetDefinition +__metaclass__ = type +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._aux_trace import _build_seq_data_set_definition_aux_trace +from ansible_collections.ibm.ibm_zos_cics.plugins.modules.aux_trace import SPACE_PRIMARY_DEFAULT, SPACE_SECONDARY_DEFAULT + + +@pytest.mark.skipif(sys.version_info.major < 3, + reason=PYTHON_LANGUAGE_FEATURES_MESSAGE) +def test_aux_trace_definition_megabytes(): + data_set = dict( + name="ANSI.M.DFHAUXT", + state="initial", + exists=False, + data_set_organization="NONE", + unit=MEGABYTES, + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT + ) + + definition = _build_seq_data_set_definition_aux_trace(data_set) + test_definition = DatasetDefinition( + dataset_name="ANSI.M.DFHAUXT", + block_size=4096, + 
record_length=4096, + record_format="FB", + disposition="NEW", + normal_disposition="CATALOG", + conditional_disposition="DELETE", + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT, + primary_unit="M", + secondary_unit="M", + type="SEQ" + ) + + assert definition.__dict__ == test_definition.__dict__ + + +@pytest.mark.skipif(sys.version_info.major < 3, + reason=PYTHON_LANGUAGE_FEATURES_MESSAGE) +def test_aux_trace_definition_volumes(): + data_set = dict( + name="ANSI.M.DFHAUXT", + state="initial", + exists=False, + data_set_organization="NONE", + unit=MEGABYTES, + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT, + volumes=["vserv1"] + ) + + definition = _build_seq_data_set_definition_aux_trace(data_set) + test_definition = DatasetDefinition( + dataset_name="ANSI.M.DFHAUXT", + block_size=4096, + record_length=4096, + record_format="FB", + disposition="NEW", + normal_disposition="CATALOG", + conditional_disposition="DELETE", + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT, + primary_unit="M", + secondary_unit="M", + type="SEQ", + volumes=["vserv1"] + ) + + assert definition.__dict__ == test_definition.__dict__ diff --git a/tests/unit/module_utils/test_auxiliary_temp.py b/tests/unit/module_utils/test_auxiliary_temp.py new file mode 100644 index 00000000..b358e327 --- /dev/null +++ b/tests/unit/module_utils/test_auxiliary_temp.py @@ -0,0 +1,143 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 
2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils import _aux_temp_storage as auxiliary_temp +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils import _data_set_utils as data_set_utils +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._data_set import CYLINDERS, MEGABYTES +from ansible_collections.ibm.ibm_zos_cics.tests.unit.helpers.data_set_helper import PYTHON_LANGUAGE_FEATURES_MESSAGE +from ansible_collections.ibm.ibm_zos_cics.plugins.modules.aux_temp_storage import SPACE_PRIMARY_DEFAULT, SPACE_SECONDARY_DEFAULT +import pytest +import sys + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_get_idcams_cmd_megabytes(): + dataset = dict( + name="ANSI.CYLS.DFHTEMP", + state="initial", + exists=False, + data_set_organization="NONE", + unit=MEGABYTES, + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT + ) + idcams_cmd_intra = data_set_utils._build_idcams_define_cmd( + auxiliary_temp._get_idcams_cmd_temp(dataset) + ) + assert ( + idcams_cmd_intra + == """ + DEFINE CLUSTER (NAME(ANSI.CYLS.DFHTEMP) - + MEGABYTES(200 10) - + RECORDSIZE(4089 4089) - + NONINDEXED - + CONTROLINTERVALSIZE(4096) - + SHAREOPTIONS(2 3)) - + DATA (NAME(ANSI.CYLS.DFHTEMP.DATA) - + UNIQUE) + """ + ) + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_get_idcams_cmd_cylinders(): + dataset = dict( + name="ANSI.CYLS.DFHTEMP", + state="initial", + exists=False, + data_set_organization="NONE", + unit=CYLINDERS, + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT + ) + idcams_cmd_intra = data_set_utils._build_idcams_define_cmd( + auxiliary_temp._get_idcams_cmd_temp(dataset) + ) + assert ( + idcams_cmd_intra + == """ + DEFINE CLUSTER 
(NAME(ANSI.CYLS.DFHTEMP) - + CYLINDERS(200 10) - + RECORDSIZE(4089 4089) - + NONINDEXED - + CONTROLINTERVALSIZE(4096) - + SHAREOPTIONS(2 3)) - + DATA (NAME(ANSI.CYLS.DFHTEMP.DATA) - + UNIQUE) + """ + ) + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_get_idcams_cmd_volumes(): + dataset = dict( + name="ANSI.CYLS.DFHTEMP", + state="initial", + exists=False, + data_set_organization="NONE", + unit=CYLINDERS, + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT, + volumes=["vserv1"] + ) + idcams_cmd_intra = data_set_utils._build_idcams_define_cmd( + auxiliary_temp._get_idcams_cmd_temp(dataset) + ) + assert ( + idcams_cmd_intra + == """ + DEFINE CLUSTER (NAME(ANSI.CYLS.DFHTEMP) - + CYLINDERS(200 10) - + RECORDSIZE(4089 4089) - + NONINDEXED - + CONTROLINTERVALSIZE(4096) - + SHAREOPTIONS(2 3) - + VOLUMES(vserv1)) - + DATA (NAME(ANSI.CYLS.DFHTEMP.DATA) - + UNIQUE) + """ + ) + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_get_idcams_cmd_multiple_volumes(): + dataset = dict( + name="ANSI.CYLS.DFHTEMP", + state="initial", + exists=False, + data_set_organization="NONE", + unit=CYLINDERS, + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT, + volumes=["vserv1", "vserv2"] + ) + idcams_cmd_intra = data_set_utils._build_idcams_define_cmd( + auxiliary_temp._get_idcams_cmd_temp(dataset) + ) + assert ( + idcams_cmd_intra + == """ + DEFINE CLUSTER (NAME(ANSI.CYLS.DFHTEMP) - + CYLINDERS(200 10) - + RECORDSIZE(4089 4089) - + NONINDEXED - + CONTROLINTERVALSIZE(4096) - + SHAREOPTIONS(2 3) - + VOLUMES(vserv1 vserv2)) - + DATA (NAME(ANSI.CYLS.DFHTEMP.DATA) - + UNIQUE) + """ + ) diff --git a/tests/unit/module_utils/test_cicsgetversion.py b/tests/unit/module_utils/test_cicsgetversion.py new file mode 100644 index 00000000..c77e026d --- /dev/null +++ b/tests/unit/module_utils/test_cicsgetversion.py @@ -0,0 +1,81 @@ +from __future__ import 
(absolute_import, division, print_function) +__metaclass__ = type + +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils import _cicsgetversion as cicsgetversion +try: + from unittest.mock import MagicMock +except ImportError: + from mock import MagicMock + + +def test_get_dataset_member_version_record(): + stdout = ''' + * * + * STATUS = 7.1.0 * + * * + *---------------------------------------------------------------* + * * + * DESCRIPTION : * + + ''' + cicsgetversion.read = MagicMock(return_value=stdout) + + ds = 'CICS.TEST.PATH' + result = cicsgetversion.get_dataset_member_version_record(ds) + assert result == '7.1.0' + + +def test_get_dataset_member_version_record_not_present(): + stdout = ''' + * * + * STATUS = * + * * + *---------------------------------------------------------------* + * * + * DESCRIPTION : * + + ''' + cicsgetversion.read = MagicMock(return_value=stdout) + + error_raised = False + try: + cicsgetversion.get_dataset_member_version_record('CICS.TEST.PATH') + except Exception as e: + print((repr(e))) + error_raised = True + assert error_raised + + +def test_get_dataset_member_version_record_too_long(): + stdout = ''' + * * + * STATUS = 1234647473636738383762728 * + * * + *---------------------------------------------------------------* + * * + * DESCRIPTION : * + + ''' + cicsgetversion.read = MagicMock(return_value=stdout) + + error_raised = False + try: + cicsgetversion.get_dataset_member_version_record('CICS.TEST.PATH') + except Exception as e: + print((repr(e))) + error_raised = True + assert error_raised + + +def test_get_dataset_member_version_record_blank(): + stdout = ''' + ''' + cicsgetversion.read = MagicMock(return_value=stdout) + + error_raised = False + try: + cicsgetversion.get_dataset_member_version_record('CICS.TEST.PATH') + except Exception as e: + print((repr(e))) + error_raised = True + assert error_raised diff --git a/tests/unit/module_utils/test_csd.py b/tests/unit/module_utils/test_csd.py new file mode 100644 
index 00000000..ae864bb4 --- /dev/null +++ b/tests/unit/module_utils/test_csd.py @@ -0,0 +1,411 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import absolute_import, division, print_function +__metaclass__ = type +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.dd_statement import StdinDefinition +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._data_set import CYLINDERS, MEGABYTES +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._response import MVSExecutionException, _execution +from ansible_collections.ibm.ibm_zos_cics.tests.unit.helpers.data_set_helper import ( + PYTHON_LANGUAGE_FEATURES_MESSAGE, + CSDUP_name, + CSDUP_stderr, + CSDUP_initialize_stdout +) +from ansible_collections.ibm.ibm_zos_cics.plugins.modules.csd import SPACE_PRIMARY_DEFAULT, SPACE_SECONDARY_DEFAULT + +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.zos_mvs_raw import MVSCmdResponse + +__metaclass__ = type +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils import _csd as csd +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils import _data_set_utils as data_set_utils +import pytest +import sys + +try: + from unittest.mock import MagicMock +except ImportError: + from mock import MagicMock + + +NAME = "ANSI.TEST.DFHCSD" + + +def setUp(): + StdinDefinition.__init__ = MagicMock(return_value=None) + + +@pytest.mark.skipif(sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE) +def test_get_idcams_cmd_megabytes(): + csd_data_set = dict( + name=NAME, + sdfhload="CICSTS.IN56.SDFHLOAD", + state="initial", + exists=False, + data_set_organization="NONE", + unit=MEGABYTES, + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT + ) + idcams_cmd_csd = data_set_utils._build_idcams_define_cmd(csd._get_idcams_cmd_csd(csd_data_set)) + assert idcams_cmd_csd == ''' + DEFINE CLUSTER 
(NAME(ANSI.TEST.DFHCSD) - + MEGABYTES(4 1) - + RECORDSIZE(200 2000) - + INDEXED - + KEYS(22 0) - + FREESPACE(10 10) - + SHAREOPTIONS(2) - + REUSE) - + DATA (NAME(ANSI.TEST.DFHCSD.DATA) - + CONTROLINTERVALSIZE(8192)) - + INDEX (NAME(ANSI.TEST.DFHCSD.INDEX)) + ''' + + +@pytest.mark.skipif(sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE) +def test_get_idcams_cmd_cylinders(): + csd_data_set = dict( + name=NAME, + sdfhload="CICSTS.IN56.SDFHLOAD", + state="initial", + exists=False, + data_set_organization="NONE", + unit=CYLINDERS, + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT + ) + idcams_cmd_csd = data_set_utils._build_idcams_define_cmd(csd._get_idcams_cmd_csd(csd_data_set)) + assert idcams_cmd_csd == ''' + DEFINE CLUSTER (NAME(ANSI.TEST.DFHCSD) - + CYLINDERS(4 1) - + RECORDSIZE(200 2000) - + INDEXED - + KEYS(22 0) - + FREESPACE(10 10) - + SHAREOPTIONS(2) - + REUSE) - + DATA (NAME(ANSI.TEST.DFHCSD.DATA) - + CONTROLINTERVALSIZE(8192)) - + INDEX (NAME(ANSI.TEST.DFHCSD.INDEX)) + ''' + + +@pytest.mark.skipif(sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE) +def test_get_idcams_cmd_backout_recovery(): + csd_data_set = dict( + name=NAME, + sdfhload="CICSTS.IN56.SDFHLOAD", + state="initial", + exists=False, + data_set_organization="NONE", + unit=MEGABYTES, + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT, + log="UNDO" + ) + idcams_cmd_csd = data_set_utils._build_idcams_define_cmd(csd._get_idcams_cmd_csd(csd_data_set)) + assert idcams_cmd_csd == ''' + DEFINE CLUSTER (NAME(ANSI.TEST.DFHCSD) - + MEGABYTES(4 1) - + RECORDSIZE(200 2000) - + INDEXED - + KEYS(22 0) - + FREESPACE(10 10) - + SHAREOPTIONS(2) - + REUSE - + LOG(UNDO)) - + DATA (NAME(ANSI.TEST.DFHCSD.DATA) - + CONTROLINTERVALSIZE(8192)) - + INDEX (NAME(ANSI.TEST.DFHCSD.INDEX)) + ''' + + +@pytest.mark.skipif(sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE) +def test_get_idcams_cmd_forward_recovery(): + csd_data_set = dict( 
+ name=NAME, + sdfhload="CICSTS.IN56.SDFHLOAD", + state="initial", + exists=False, + data_set_organization="NONE", + unit=MEGABYTES, + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT, + log="ALL", + logstream_id="TEST.DATA.LOG.STREAM" + ) + idcams_cmd_csd = data_set_utils._build_idcams_define_cmd(csd._get_idcams_cmd_csd(csd_data_set)) + assert idcams_cmd_csd == ''' + DEFINE CLUSTER (NAME(ANSI.TEST.DFHCSD) - + MEGABYTES(4 1) - + RECORDSIZE(200 2000) - + INDEXED - + KEYS(22 0) - + FREESPACE(10 10) - + SHAREOPTIONS(2) - + REUSE - + LOG(ALL) - + LOGSTREAMID(TEST.DATA.LOG.STREAM)) - + DATA (NAME(ANSI.TEST.DFHCSD.DATA) - + CONTROLINTERVALSIZE(8192)) - + INDEX (NAME(ANSI.TEST.DFHCSD.INDEX)) + ''' + + +@pytest.mark.skipif(sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE) +def test_get_idcams_cmd_backout_recovery_logstream_id_provided(): + csd_data_set = dict( + name=NAME, + sdfhload="CICSTS.IN56.SDFHLOAD", + state="initial", + exists=False, + data_set_organization="NONE", + unit=MEGABYTES, + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT, + log="UNDO", + logstream_id="TEST.DATA.LOG.STREAM" + ) + idcams_cmd_csd = data_set_utils._build_idcams_define_cmd(csd._get_idcams_cmd_csd(csd_data_set)) + assert idcams_cmd_csd == ''' + DEFINE CLUSTER (NAME(ANSI.TEST.DFHCSD) - + MEGABYTES(4 1) - + RECORDSIZE(200 2000) - + INDEXED - + KEYS(22 0) - + FREESPACE(10 10) - + SHAREOPTIONS(2) - + REUSE - + LOG(UNDO)) - + DATA (NAME(ANSI.TEST.DFHCSD.DATA) - + CONTROLINTERVALSIZE(8192)) - + INDEX (NAME(ANSI.TEST.DFHCSD.INDEX)) + ''' + + +@pytest.mark.skipif(sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE) +def test_get_idcams_cmd_nonrecoverable(): + csd_data_set = dict( + name=NAME, + sdfhload="CICSTS.IN56.SDFHLOAD", + state="initial", + exists=False, + data_set_organization="NONE", + unit=MEGABYTES, + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT, + log="NONE" + ) + idcams_cmd_csd = 
data_set_utils._build_idcams_define_cmd(csd._get_idcams_cmd_csd(csd_data_set)) + assert idcams_cmd_csd == ''' + DEFINE CLUSTER (NAME(ANSI.TEST.DFHCSD) - + MEGABYTES(4 1) - + RECORDSIZE(200 2000) - + INDEXED - + KEYS(22 0) - + FREESPACE(10 10) - + SHAREOPTIONS(2) - + REUSE - + LOG(NONE)) - + DATA (NAME(ANSI.TEST.DFHCSD.DATA) - + CONTROLINTERVALSIZE(8192)) - + INDEX (NAME(ANSI.TEST.DFHCSD.INDEX)) + ''' + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_get_idcams_cmd_volumes(): + csd_data_set = dict( + name=NAME, + sdfhload="CICSTS.IN56.SDFHLOAD", + state="initial", + exists=False, + data_set_organization="NONE", + unit=MEGABYTES, + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT, + volumes=["vserv1"] + ) + idcams_cmd_csd = data_set_utils._build_idcams_define_cmd(csd._get_idcams_cmd_csd(csd_data_set)) + assert idcams_cmd_csd == ''' + DEFINE CLUSTER (NAME(ANSI.TEST.DFHCSD) - + MEGABYTES(4 1) - + RECORDSIZE(200 2000) - + INDEXED - + KEYS(22 0) - + FREESPACE(10 10) - + SHAREOPTIONS(2) - + REUSE - + VOLUMES(vserv1)) - + DATA (NAME(ANSI.TEST.DFHCSD.DATA) - + CONTROLINTERVALSIZE(8192)) - + INDEX (NAME(ANSI.TEST.DFHCSD.INDEX)) + ''' + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_get_idcams_cmd_multiple_volumes(): + csd_data_set = dict( + name=NAME, + sdfhload="CICSTS.IN56.SDFHLOAD", + state="initial", + exists=False, + data_set_organization="NONE", + unit=MEGABYTES, + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT, + volumes=["vserv1", "vserv2"] + ) + idcams_cmd_csd = data_set_utils._build_idcams_define_cmd(csd._get_idcams_cmd_csd(csd_data_set)) + assert idcams_cmd_csd == ''' + DEFINE CLUSTER (NAME(ANSI.TEST.DFHCSD) - + MEGABYTES(4 1) - + RECORDSIZE(200 2000) - + INDEXED - + KEYS(22 0) - + FREESPACE(10 10) - + SHAREOPTIONS(2) - + REUSE - + VOLUMES(vserv1 vserv2)) - + DATA (NAME(ANSI.TEST.DFHCSD.DATA) - + 
CONTROLINTERVALSIZE(8192)) - + INDEX (NAME(ANSI.TEST.DFHCSD.INDEX)) + ''' + + +def test_csdup_response(): + setUp() + csd_input = { + "exists": False, + "name": NAME, + "primary": 5, + "secondary": 1, + "unit": "M", + "state": "initial", + "vsam": False, + "sdfhload": "CICSTS.IN56.SDFHLOAD", + } + + expected_executions = [ + _execution(name=CSDUP_name(), rc=0, stdout=CSDUP_initialize_stdout(NAME), stderr=CSDUP_stderr(NAME)), + ] + + csd._execute_dfhcsdup = MagicMock(return_value=MVSCmdResponse(rc=0, stdout=CSDUP_initialize_stdout(NAME), stderr=CSDUP_stderr(NAME))) + executions = csd._run_dfhcsdup(csd_input, csd._get_csdup_initilize_cmd()) + + assert executions == expected_executions + + +def test_bad_csdup_response(): + setUp() + csd_input = { + "exists": False, + "name": NAME, + "primary": 5, + "secondary": 1, + "unit": "M", + "state": "initial", + "vsam": False, + "sdfhload": "CICSTS.IN56.SDFHLOAD", + } + + expected_executions = [ + _execution(name=CSDUP_name(), rc=99, stdout=CSDUP_initialize_stdout(NAME), stderr=CSDUP_stderr(NAME)), + ] + + csd._execute_dfhcsdup = MagicMock(return_value=MVSCmdResponse(rc=99, stdout=CSDUP_initialize_stdout(NAME), stderr=CSDUP_stderr(NAME))) + + try: + csd._run_dfhcsdup(csd_input, csd._get_csdup_initilize_cmd()) + except MVSExecutionException as e: + error_message = e.message + executions = e.executions + + assert error_message == "DFHCSDUP failed with RC 99" + assert executions == expected_executions + else: + assert False + + +def test_warning_csdup_response(): + setUp() + csd_input = { + "exists": False, + "name": NAME, + "primary": 5, + "secondary": 1, + "unit": "M", + "state": "initial", + "vsam": False, + "sdfhload": "CICSTS.IN56.SDFHLOAD", + } + + expected_executions = [ + _execution(name=CSDUP_name(), rc=4, stdout=CSDUP_initialize_stdout(NAME), stderr=CSDUP_stderr(NAME)), + ] + + csd._execute_dfhcsdup = MagicMock(return_value=MVSCmdResponse(rc=4, stdout=CSDUP_initialize_stdout(NAME), stderr=CSDUP_stderr(NAME))) + + 
executions = csd._run_dfhcsdup(csd_input, csd._get_csdup_initilize_cmd()) + assert executions == expected_executions + + +def test_rc_7_csdup_response(): + setUp() + csd_input = { + "exists": False, + "name": NAME, + "primary": 5, + "secondary": 1, + "unit": "M", + "state": "initial", + "vsam": False, + "sdfhload": "CICSTS.IN56.SDFHLOAD", + } + + expected_executions = [ + _execution(name=CSDUP_name(), rc=7, stdout=CSDUP_initialize_stdout(NAME), stderr=CSDUP_stderr(NAME)), + ] + + csd._execute_dfhcsdup = MagicMock(return_value=MVSCmdResponse(rc=7, stdout=CSDUP_initialize_stdout(NAME), stderr=CSDUP_stderr(NAME))) + + executions = csd._run_dfhcsdup(csd_input, csd._get_csdup_initilize_cmd()) + assert executions == expected_executions + + +def test_rc_8_csdup_response(): + setUp() + csd_input = { + "exists": False, + "name": NAME, + "primary": 5, + "secondary": 1, + "unit": "M", + "state": "initial", + "vsam": False, + "sdfhload": "CICSTS.IN56.SDFHLOAD", + } + + expected_executions = [ + _execution(name=CSDUP_name(), rc=8, stdout=CSDUP_initialize_stdout(NAME), stderr=CSDUP_stderr(NAME)), + ] + + csd._execute_dfhcsdup = MagicMock(return_value=MVSCmdResponse(rc=8, stdout=CSDUP_initialize_stdout(NAME), stderr=CSDUP_stderr(NAME))) + + try: + csd._run_dfhcsdup(csd_input, csd._get_csdup_initilize_cmd()) + except MVSExecutionException as e: + error_message = e.message + executions = e.executions + + assert error_message == "DFHCSDUP failed with RC 8" + assert executions == expected_executions + else: + assert False diff --git a/tests/unit/module_utils/test_dataset_utils.py b/tests/unit/module_utils/test_dataset_utils.py new file mode 100644 index 00000000..a612a8b2 --- /dev/null +++ b/tests/unit/module_utils/test_dataset_utils.py @@ -0,0 +1,702 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 
2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import absolute_import, division, print_function +from ansible_collections.ibm.ibm_zos_cics.tests.unit.helpers.data_set_helper import ( + PYTHON_LANGUAGE_FEATURES_MESSAGE, + IDCAMS_create_already_exists_stdout, + IDCAMS_create_stdout, + IDCAMS_delete_not_found, + IDCAMS_delete, + IDCAMS_run_cmd, + IEFBR14_get_run_name, + LISTDS_data_set, + LISTDS_data_set_doesnt_exist, + LISTDS_member_doesnt_exist, + LISTDS_run_name +) +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.dd_statement import DatasetDefinition +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.zos_mvs_raw import MVSCmdResponse +__metaclass__ = type +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils import _data_set_utils as data_set_utils +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._response import MVSExecutionException, _execution +import pytest +import sys + +try: + from unittest.mock import MagicMock +except ImportError: + from mock import MagicMock + + +def test_unit_size_m(): + unit = "M" + unit_string = data_set_utils._get_dataset_size_unit(unit) + assert unit_string == "MEGABYTES" + + +def test_unit_size_k(): + unit = "K" + unit_string = data_set_utils._get_dataset_size_unit(unit) + assert unit_string == "KILOBYTES" + + +def test_unit_size_cyl(): + unit = "CYL" + unit_string = data_set_utils._get_dataset_size_unit(unit) + assert unit_string == "CYLINDERS" + + +def test_unit_size_rec(): + unit = "REC" + unit_string = data_set_utils._get_dataset_size_unit(unit) + assert unit_string == "RECORDS" + + +def test_unit_size_trk(): + unit = "TRK" + unit_string = data_set_utils._get_dataset_size_unit(unit) + assert unit_string == "TRACKS" + + +def test_unit_size_bad_unit(): + unit = "FISHES" + unit_string = data_set_utils._get_dataset_size_unit(unit) + assert unit_string == "MEGABYTES" + + +def test_unit_size_empty(): + unit = "" + 
unit_string = data_set_utils._get_dataset_size_unit(unit) + assert unit_string == "MEGABYTES" + + +def test__run_idcams_create(): + location = "ANSIBIT.CICS.IYTWYD03.DFHGCD" + rc = 0 + stdout = IDCAMS_create_stdout(location) + stderr = "" + data_set_utils._execute_idcams = MagicMock(return_value=MVSCmdResponse(rc, stdout, stderr)) + + cmd = IDCAMS_run_cmd(location) + + result_exececutions = data_set_utils._run_idcams( + cmd=cmd, + name="Create Catalog", + location=location, + delete=False) + + assert len(result_exececutions) == 1 + assert result_exececutions[0] == { + "name": "IDCAMS - Create Catalog - Run 1", + "rc": rc, + "stdout": stdout, + "stderr": stderr, + } + + +def test__run_idcams_create_exists(): + location = "ANSIBIT.CICS.IYTWYD01.DFHGCD" + rc = 12 + stdout = IDCAMS_create_already_exists_stdout(location) + stderr = "" + data_set_utils._execute_idcams = MagicMock(return_value=MVSCmdResponse(rc, stdout, stderr)) + + cmd = IDCAMS_run_cmd(location) + + result_exececutions = data_set_utils._run_idcams( + cmd=cmd, + name="Create Catalog", + location=location, + delete=False) + + assert len(result_exececutions) == 1 + assert result_exececutions[0] == { + "name": "IDCAMS - Create Catalog - Run 1", + "rc": rc, + "stdout": stdout, + "stderr": stderr, + } + + +def test__run_idcams_delete(): + location = "ANSIBIT.CICS.IYTWYD03.DFHGCD" + rc = 0 + stdout = IDCAMS_delete(location) + stderr = "" + data_set_utils._execute_idcams = MagicMock(return_value=MVSCmdResponse(rc, stdout, stderr)) + + cmd = ''' + DELETE {0} + '''.format(location) + + result_exececutions = data_set_utils._run_idcams( + cmd=cmd, + name="Remove Catalog", + location=location, + delete=True) + + assert len(result_exececutions) == 1 + assert result_exececutions[0] == { + "name": "IDCAMS - Remove Catalog - Run 1", + "rc": rc, + "stdout": stdout, + "stderr": stderr, + } + + +def test__run_idcams_delete_no_exist(): + location = "ANSIBIT.CICS.IYTWYD02.DFHGCD" + rc = 8 + stdout = 
IDCAMS_delete_not_found(location) + stderr = "" + data_set_utils._execute_idcams = MagicMock(return_value=MVSCmdResponse(rc, stdout, stderr)) + + cmd = ''' + DELETE {0} + '''.format(location) + + result_exececutions = data_set_utils._run_idcams( + cmd=cmd, + name="Remove Catalog", + location=location, + delete=True) + + assert len(result_exececutions) == 1 + assert result_exececutions[0] == { + "name": "IDCAMS - Remove Catalog - Run 1", + "rc": rc, + "stdout": stdout, + "stderr": stderr, + } + + +def test__run_idcams_bad_return_code_when_creating(): + location = "ANSIBIT.CICS.IYTWYD02.DFHGCD" + rc = 99 + stdout = IDCAMS_create_stdout(location) + stderr = "" + data_set_utils._execute_idcams = MagicMock(return_value=MVSCmdResponse(rc, stdout, stderr)) + + cmd = ''' + DEFINE CLUSTER - + (NAME({0}) - + INDEXED - + MEGABYTES(5 1) - + SHR(2) - + FREESPACE(10 10) - + RECORDSIZE(4089 32760) - + REUSE) - + DATA - + (NAME({0}.DATA) - + CONTROLINTERVALSIZE(32768) - + KEYS(52 0)) - + INDEX - + (NAME({0}.INDEX)) + '''.format(location) + + expected_executions = [ + _execution(name="IDCAMS - Create Catalog - Run 1", rc=rc, stdout=stdout, stderr=stderr) + ] + + try: + data_set_utils._run_idcams( + cmd=cmd, + name="Create Catalog", + location=location, + delete=False) + except MVSExecutionException as e: + assert e.message == "RC 99 when creating data set" + assert e.executions == expected_executions + else: + assert False + + +def test__run_idcams_bad_return_code_when_deleting(): + location = "ANSIBIT.CICS.IYTWYD02.DFHGCD" + rc = 99 + stdout = IDCAMS_delete(location) + stderr = "" + data_set_utils._execute_idcams = MagicMock(return_value=MVSCmdResponse(rc, stdout, stderr)) + + cmd = ''' + DELETE {0} + '''.format(location) + + expected_executions = [ + _execution(name="IDCAMS - Remove Catalog - Run 1", rc=rc, stdout=stdout, stderr=stderr) + ] + + try: + data_set_utils._run_idcams( + cmd=cmd, + name="Remove Catalog", + location=location, + delete=True) + except MVSExecutionException 
as e: + assert e.message == "RC 99 when deleting data set" + assert e.executions == expected_executions + else: + assert False + + +def test__run_listds_exists_vsam(): + location = "ANSIBIT.CICS.TESTS.A365D7A.DFHGCD" + rc = 0 + stdout = LISTDS_data_set(location, "VSAM") + stderr = "" + data_set_utils._execute_listds = MagicMock(return_value=MVSCmdResponse(rc, stdout, stderr)) + + result_exececutions, exists, ds_org = data_set_utils._run_listds(location) + + assert len(result_exececutions) == 1 + assert result_exececutions[0] == { + "name": LISTDS_run_name(1), + "rc": rc, + "stdout": stdout, + "stderr": stderr, + } + assert exists is True + assert ds_org == "VSAM" + + +def test__run_listds_exists_sequential(): + location = "ANSIBIT.CICS.TESTS.A365D7A.DFHGCD" + rc = 0 + stdout = LISTDS_data_set(location, "PS") + stderr = "" + data_set_utils._execute_listds = MagicMock(return_value=MVSCmdResponse(rc, stdout, stderr)) + + result_exececutions, exists, ds_org = data_set_utils._run_listds(location) + + assert len(result_exececutions) == 1 + assert result_exececutions[0] == { + "name": LISTDS_run_name(1), + "rc": rc, + "stdout": stdout, + "stderr": stderr, + } + assert exists is True + assert ds_org == "Sequential" + + +def test__run_listds_exists_partitioned(): + location = "ANSIBIT.CICS.TESTS.A365D7A.DFHGCD" + rc = 0 + stdout = LISTDS_data_set(location, "PO") + stderr = "" + data_set_utils._execute_listds = MagicMock(return_value=MVSCmdResponse(rc, stdout, stderr)) + + result_exececutions, exists, ds_org = data_set_utils._run_listds(location) + + assert len(result_exececutions) == 1 + assert result_exececutions[0] == { + "name": LISTDS_run_name(1), + "rc": rc, + "stdout": stdout, + "stderr": stderr, + } + assert exists is True + assert ds_org == "Partitioned" + + +def test__run_listds_exists_indexed_sequential(): + location = "ANSIBIT.CICS.TESTS.A365D7A.DFHGCD" + rc = 0 + stdout = LISTDS_data_set(location, "IS") + stderr = "" + data_set_utils._execute_listds = 
MagicMock(return_value=MVSCmdResponse(rc, stdout, stderr)) + + result_exececutions, exists, ds_org = data_set_utils._run_listds(location) + + assert len(result_exececutions) == 1 + assert result_exececutions[0] == { + "name": LISTDS_run_name(1), + "rc": rc, + "stdout": stdout, + "stderr": stderr, + } + assert exists is True + assert ds_org == "Indexed Sequential" + + +def test__run_listds_exists_direct_access(): + location = "ANSIBIT.CICS.TESTS.A365D7A.DFHGCD" + rc = 0 + stdout = LISTDS_data_set(location, "DA") + stderr = "" + data_set_utils._execute_listds = MagicMock(return_value=MVSCmdResponse(rc, stdout, stderr)) + + result_exececutions, exists, ds_org = data_set_utils._run_listds(location) + + assert len(result_exececutions) == 1 + assert result_exececutions[0] == { + "name": LISTDS_run_name(1), + "rc": rc, + "stdout": stdout, + "stderr": stderr, + } + assert exists is True + assert ds_org == "Direct Access" + + +def test__run_listds_exists_other(): + location = "ANSIBIT.CICS.TESTS.A365D7A.DFHGCD" + rc = 0 + stdout = LISTDS_data_set(location, "??") + stderr = "" + data_set_utils._execute_listds = MagicMock(return_value=MVSCmdResponse(rc, stdout, stderr)) + + result_exececutions, exists, ds_org = data_set_utils._run_listds(location) + + assert len(result_exececutions) == 1 + assert result_exececutions[0] == { + "name": LISTDS_run_name(1), + "rc": rc, + "stdout": stdout, + "stderr": stderr, + } + assert exists is True + assert ds_org == "Other" + + +def test__run_listds_exists_unspecified(): + location = "ANSIBIT.CICS.TESTS.A365D7A.DFHGCD" + rc = 0 + stdout = LISTDS_data_set(location, "**") + stderr = "" + data_set_utils._execute_listds = MagicMock(return_value=MVSCmdResponse(rc, stdout, stderr)) + + result_exececutions, exists, ds_org = data_set_utils._run_listds(location) + + assert len(result_exececutions) == 1 + assert result_exececutions[0] == { + "name": LISTDS_run_name(1), + "rc": rc, + "stdout": stdout, + "stderr": stderr, + } + assert exists is True + 
assert ds_org == "Unspecified" + + +def test__run_listds_exists_unknown(): + location = "ANSIBIT.CICS.TESTS.A365D7A.DFHGCD" + rc = 0 + stdout = LISTDS_data_set(location, "NOT_REAL_DSORG") + stderr = "" + data_set_utils._execute_listds = MagicMock(return_value=MVSCmdResponse(rc, stdout, stderr)) + + result_exececutions, exists, ds_org = data_set_utils._run_listds(location) + + assert len(result_exececutions) == 1 + assert result_exececutions[0] == { + "name": LISTDS_run_name(1), + "rc": rc, + "stdout": stdout, + "stderr": stderr, + } + assert exists is True + assert ds_org == "Unspecified" + + +def test__run_listds_bad_rc(): + location = "ANSIBIT.CICS.TESTS.A365D7A.DFHGCD" + name = LISTDS_run_name(1) + rc = 16 + stdout = LISTDS_data_set(location, "VSAM") + stderr = "" + data_set_utils._execute_listds = MagicMock(return_value=MVSCmdResponse(rc, stdout, stderr)) + + expected_executions = [_execution(name=name, rc=rc, stdout=stdout, stderr=stderr)] + + try: + data_set_utils._run_listds(location) + except MVSExecutionException as e: + assert e.message == "RC 16 running LISTDS Command" + assert e.executions == expected_executions + else: + assert False + + +def test__run_listds_not_exists(): + location = "ANSIBIT.CICS.TESTS.A294D11B.DFHGaCD" + rc = 8 + stdout = LISTDS_data_set_doesnt_exist(location) + stderr = "" + data_set_utils._execute_listds = MagicMock(return_value=MVSCmdResponse(rc, stdout, stderr)) + + result_exececutions, exists, ds_org = data_set_utils._run_listds(location) + + assert len(result_exececutions) == 1 + assert result_exececutions[0] == { + "name": LISTDS_run_name(1), + "rc": rc, + "stdout": stdout, + "stderr": stderr, + } + assert exists is False + assert ds_org == "NONE" + + +def test__run_listds_member_not_exists(): + base_ds_name = "ANSIBIT.CICS.TESTS.A294D11B" + member_name = "MEMB" + location = "{0}({1})".format(base_ds_name, member_name) + rc = 4 + stdout = LISTDS_member_doesnt_exist(base_ds_name, member_name) + stderr = "" + 
data_set_utils._execute_listds = MagicMock(return_value=MVSCmdResponse(rc, stdout, stderr)) + + result_exececutions, exists, ds_org = data_set_utils._run_listds(location) + + assert len(result_exececutions) == 1 + assert result_exececutions[0] == { + "name": LISTDS_run_name(1), + "rc": rc, + "stdout": stdout, + "stderr": stderr, + } + assert exists is False + assert ds_org == "NONE" + + +def test__run_listds_with_no_zoau_response(): + rc = 0 + stdout = "" + stderr = "" + data_set_utils._execute_listds = MagicMock(return_value=MVSCmdResponse(rc, stdout, stderr)) + + expected_executions = [ + _execution(name=LISTDS_run_name(1), rc=rc, stdout=stdout, stderr=stderr), + _execution(name=LISTDS_run_name(2), rc=rc, stdout=stdout, stderr=stderr), + _execution(name=LISTDS_run_name(3), rc=rc, stdout=stdout, stderr=stderr), + _execution(name=LISTDS_run_name(4), rc=rc, stdout=stdout, stderr=stderr), + _execution(name=LISTDS_run_name(5), rc=rc, stdout=stdout, stderr=stderr), + _execution(name=LISTDS_run_name(6), rc=rc, stdout=stdout, stderr=stderr), + _execution(name=LISTDS_run_name(7), rc=rc, stdout=stdout, stderr=stderr), + _execution(name=LISTDS_run_name(8), rc=rc, stdout=stdout, stderr=stderr), + _execution(name=LISTDS_run_name(9), rc=rc, stdout=stdout, stderr=stderr), + _execution(name=LISTDS_run_name(10), rc=rc, stdout=stdout, stderr=stderr) + ] + + try: + data_set_utils._run_listds("LOCATION THATS NOT IN STDOUT") + except MVSExecutionException as e: + assert e.message == "LISTDS Command output not recognised" + assert e.executions == expected_executions + else: + assert False + + +@pytest.mark.skipif(sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE) +def test__run_iefbr14(): + rc = 0 + stdout = "stdout" + stderr = "stderr" + data_set_utils.MVSCmd.execute = MagicMock(return_value=MVSCmdResponse(rc, stdout, stderr)) + + definition = DatasetDefinition( + dataset_name="DFHTEST", + block_size=4096, + record_length=4096, + record_format="FB", + 
disposition="NEW", + normal_disposition="catalog", + conditional_disposition="delete", + primary=15, + secondary=3, + primary_unit="MB", + type="SEQ" + ) + + result_exececutions = data_set_utils._run_iefbr14( + ddname="DFHIEFT", + definition=definition + ) + + assert len(result_exececutions) == 1 + assert result_exececutions[0] == { + "name": "IEFBR14 - DFHIEFT - Run 1", + "rc": rc, + "stdout": stdout, + "stderr": stderr, + } + + +def test__run_iefbr14_bad_rc(): + rc = 99 + stdout = "stdout" + stderr = "stderr" + data_set_utils.MVSCmd.execute = MagicMock(return_value=MVSCmdResponse(rc, stdout, stderr)) + + definition = DatasetDefinition( + dataset_name="DFHTEST", + block_size=4096, + record_length=4096, + record_format="FB", + disposition="NEW", + normal_disposition="catalog", + conditional_disposition="delete", + primary=15, + secondary=3, + primary_unit="MB", + type="SEQ" + ) + + expected_executions = [{ + "name": "IEFBR14 - DFHIEFT - Run 1", + "rc": rc, + "stdout": stdout, + "stderr": stderr, + }] + + try: + data_set_utils._run_iefbr14( + ddname="DFHIEFT", + definition=definition + ) + except MVSExecutionException as e: + assert e.message == "RC {0} when creating sequential data set".format(99) + assert e.executions == expected_executions + else: + assert False + + +def test__run_iefbr14_no_response(): + rc = 0 + stdout = "" + stderr = "" + data_set_utils.MVSCmd.execute = MagicMock(return_value=MVSCmdResponse(rc, stdout, stderr)) + + definition = DatasetDefinition( + dataset_name="DFHTEST", + block_size=4096, + record_length=4096, + record_format="FB", + disposition="NEW", + normal_disposition="catalog", + conditional_disposition="delete", + primary=15, + secondary=3, + primary_unit="MB", + type="SEQ" + ) + + expected_executions = [ + _execution(name=IEFBR14_get_run_name(1), rc=rc, stdout=stdout, stderr=stderr), + _execution(name=IEFBR14_get_run_name(2), rc=rc, stdout=stdout, stderr=stderr), + _execution(name=IEFBR14_get_run_name(3), rc=rc, stdout=stdout, 
stderr=stderr), + _execution(name=IEFBR14_get_run_name(4), rc=rc, stdout=stdout, stderr=stderr), + _execution(name=IEFBR14_get_run_name(5), rc=rc, stdout=stdout, stderr=stderr), + _execution(name=IEFBR14_get_run_name(6), rc=rc, stdout=stdout, stderr=stderr), + _execution(name=IEFBR14_get_run_name(7), rc=rc, stdout=stdout, stderr=stderr), + _execution(name=IEFBR14_get_run_name(8), rc=rc, stdout=stdout, stderr=stderr), + _execution(name=IEFBR14_get_run_name(9), rc=rc, stdout=stdout, stderr=stderr), + _execution(name=IEFBR14_get_run_name(10), rc=rc, stdout=stdout, stderr=stderr) + ] + + try: + data_set_utils._run_iefbr14( + ddname="DFHIEFT", + definition=definition + ) + except MVSExecutionException as e: + assert e.message == "IEFBR14 Command output not recognised" + assert e.executions == expected_executions + else: + assert False + + +@pytest.mark.skipif(sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE) +def test__build_idcams_volumes(): + volumes = ["vserv1", "vserv2", "vserv3"] + + assert data_set_utils._build_idcams_volumes(volumes) == " -\n VOLUMES(vserv1 vserv2 vserv3)" + + +def test__read_data_set_content(): + rc = 0 + stdout = "stdout" + stderr = "stderr" + + data_set_name = "TEST.DATA.SET" + data_set_utils._execute_command = MagicMock(return_value=(rc, stdout, stderr)) + result_executions, result_data_set_content = data_set_utils._read_data_set_content(data_set_name) + + assert result_data_set_content == stdout + assert result_executions[0] == { + "name": "Read data set {0}".format(data_set_name), + "rc": 0, + "stdout": stdout, + "stderr": stderr + } + + +def test__read_data_set_content_bad_rc(): + rc = 99 + stdout = "stdout" + stderr = "stderr" + + data_set_name = "TEST.DATA.SET" + data_set_utils._execute_command = MagicMock(return_value=(rc, stdout, stderr)) + + expected_executions = [{ + "name": "Read data set {0}".format(data_set_name), + "rc": rc, + "stdout": stdout, + "stderr": stderr + }] + + try: + 
data_set_utils._read_data_set_content(data_set_name) + except MVSExecutionException as e: + assert e.message == "RC {0} when reading content from data set {1}".format(rc, data_set_name) + assert e.executions == expected_executions + else: + assert False + + +def test__write_jcl_to_data_set(): + data_set_name = "TEST.DATA.SET" + jcl = "" + + rc = 0 + stdout = "" + stderr = "" + data_set_utils._execute_command = MagicMock(return_value=(rc, stdout, stderr)) + + expected_executions = [{ + "name": "Copy JCL contents to data set", + "rc": 0, + "stdout": "", + "stderr": "" + }] + + executions = data_set_utils._write_jcl_to_data_set(jcl, data_set_name) + + assert expected_executions == executions + + +def test__write_jcl_to_data_set_fail(): + data_set_name = "TEST.DATA.SET" + jcl = "" + rc = 99 + stdout = "cp failed" + stderr = "stderr" + data_set_utils._execute_command = MagicMock(return_value=(rc, stdout, stderr)) + + expected_executions = [{ + "name": "Copy JCL contents to data set", + "rc": 99, + "stdout": "cp failed", + "stderr": "stderr" + }] + + with pytest.raises(MVSExecutionException) as e: + data_set_utils._write_jcl_to_data_set(jcl, data_set_name) + + assert e.value.message == "Failed to copy JCL content to data set" + assert e.value.executions == expected_executions diff --git a/tests/unit/module_utils/test_global_catalog.py b/tests/unit/module_utils/test_global_catalog.py new file mode 100644 index 00000000..2f706c2c --- /dev/null +++ b/tests/unit/module_utils/test_global_catalog.py @@ -0,0 +1,486 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 
2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import absolute_import, division, print_function +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._data_set import CYLINDERS, MEGABYTES + +from ansible_collections.ibm.ibm_zos_cics.tests.unit.helpers.data_set_helper import ( + PYTHON_LANGUAGE_FEATURES_MESSAGE, + RMUTL_get_run_name, + RMUTL_stdout, + RMUTL_update_run_name +) +__metaclass__ = type +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils import _data_set_utils as data_set_utils +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils import _global_catalog as global_catalog +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._response import ( + MVSExecutionException, + _execution, +) +from ansible_collections.ibm.ibm_zos_cics.plugins.modules.global_catalog import SPACE_PRIMARY_DEFAULT, SPACE_SECONDARY_DEFAULT +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.zos_mvs_raw import ( + MVSCmdResponse, +) +import pytest +import sys + +try: + from unittest.mock import MagicMock +except ImportError: + from mock import MagicMock + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_get_idcams_cmd_megabytes(): + catalog = dict( + name="ANSI.TEST.DFHGCD", + sdfhload="CICSTS.IN56.SDFHLOAD", + state="initial", + autostart_override="", + nextstart="", + exists=False, + data_set_organization="NONE", + unit=MEGABYTES, + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT + ) + idcams_cmd_gcd = data_set_utils._build_idcams_define_cmd( + global_catalog._get_idcams_cmd_gcd(catalog) + ) + assert ( + idcams_cmd_gcd + == """ + DEFINE CLUSTER (NAME(ANSI.TEST.DFHGCD) - + MEGABYTES(5 1) - + RECORDSIZE(4089 32760) - + INDEXED - + KEYS(52 0) - + FREESPACE(10 10) - + SHAREOPTIONS(2) - + REUSE) - + DATA (NAME(ANSI.TEST.DFHGCD.DATA) - + CONTROLINTERVALSIZE(32768)) - + INDEX 
(NAME(ANSI.TEST.DFHGCD.INDEX)) + """ + ) + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_get_idcams_cmd_cylinders(): + catalog = dict( + name="ANSI.CYLS.DFHGCD", + sdfhload="CICSTS.IN56.SDFHLOAD", + state="initial", + autostart_override="", + nextstart="", + exists=False, + data_set_organization="NONE", + unit=CYLINDERS, + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT + ) + idcams_cmd_gcd = data_set_utils._build_idcams_define_cmd( + global_catalog._get_idcams_cmd_gcd(catalog) + ) + assert ( + idcams_cmd_gcd + == """ + DEFINE CLUSTER (NAME(ANSI.CYLS.DFHGCD) - + CYLINDERS(5 1) - + RECORDSIZE(4089 32760) - + INDEXED - + KEYS(52 0) - + FREESPACE(10 10) - + SHAREOPTIONS(2) - + REUSE) - + DATA (NAME(ANSI.CYLS.DFHGCD.DATA) - + CONTROLINTERVALSIZE(32768)) - + INDEX (NAME(ANSI.CYLS.DFHGCD.INDEX)) + """ + ) + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_get_idcams_cmd_volumes(): + catalog = dict( + name="ANSI.CYLS.DFHGCD", + sdfhload="CICSTS.IN56.SDFHLOAD", + state="initial", + autostart_override="", + nextstart="", + exists=False, + data_set_organization="NONE", + unit=CYLINDERS, + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT, + volumes=["vserv1"] + ) + idcams_cmd_gcd = data_set_utils._build_idcams_define_cmd( + global_catalog._get_idcams_cmd_gcd(catalog) + ) + assert ( + idcams_cmd_gcd + == """ + DEFINE CLUSTER (NAME(ANSI.CYLS.DFHGCD) - + CYLINDERS(5 1) - + RECORDSIZE(4089 32760) - + INDEXED - + KEYS(52 0) - + FREESPACE(10 10) - + SHAREOPTIONS(2) - + REUSE - + VOLUMES(vserv1)) - + DATA (NAME(ANSI.CYLS.DFHGCD.DATA) - + CONTROLINTERVALSIZE(32768)) - + INDEX (NAME(ANSI.CYLS.DFHGCD.INDEX)) + """ + ) + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_get_idcams_cmd_multiple_volumes(): + catalog = dict( + name="ANSI.CYLS.DFHGCD", + 
sdfhload="CICSTS.IN56.SDFHLOAD", + state="initial", + autostart_override="", + nextstart="", + exists=False, + data_set_organization="NONE", + unit=CYLINDERS, + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT, + volumes=["vserv1", "vserv2"] + ) + idcams_cmd_gcd = data_set_utils._build_idcams_define_cmd( + global_catalog._get_idcams_cmd_gcd(catalog) + ) + assert ( + idcams_cmd_gcd + == """ + DEFINE CLUSTER (NAME(ANSI.CYLS.DFHGCD) - + CYLINDERS(5 1) - + RECORDSIZE(4089 32760) - + INDEXED - + KEYS(52 0) - + FREESPACE(10 10) - + SHAREOPTIONS(2) - + REUSE - + VOLUMES(vserv1 vserv2)) - + DATA (NAME(ANSI.CYLS.DFHGCD.DATA) - + CONTROLINTERVALSIZE(32768)) - + INDEX (NAME(ANSI.CYLS.DFHGCD.INDEX)) + """ + ) + + +def test_global_catalog_get_records_autoinit_unknown(): + stdout = RMUTL_stdout("AUTOINIT", "UNKNOWN") + resp = global_catalog._get_catalog_records(stdout=stdout) + assert resp == ("AUTOINIT", "UNKNOWN") + + +def test_global_catalog_get_records_autoasis_emergency(): + stdout = RMUTL_stdout("AUTOASIS", "EMERGENCY") + resp = global_catalog._get_catalog_records(stdout=stdout) + assert resp == ("AUTOASIS", "EMERGENCY") + + +def test_global_catalog_get_records_autocold_emergency(): + stdout = RMUTL_stdout("AUTOCOLD", "EMERGENCY") + resp = global_catalog._get_catalog_records(stdout=stdout) + assert resp == ("AUTOCOLD", "EMERGENCY") + + +def test_global_catalog_run_rmutl_with_cmd(): + executions = [ + _execution( + name=RMUTL_update_run_name(1), + rc=0, + stdout="", + stderr="", + ) + ] + global_catalog.MVSCmd.execute = MagicMock( + return_value=MVSCmdResponse(rc=0, stdout="", stderr="") + ) + global_catalog._get_rmutl_dds = MagicMock(return_value=[]) + result = global_catalog._run_dfhrmutl( + location="DATA.SET", sdfhload="SDFH.LOAD", cmd="HI" + ) + + assert result == executions + + +def test_global_catalog_run_rmutl_with_cmd_and_failure(): + executions = [ + _execution( + name=RMUTL_update_run_name(1), + rc=16, + stdout=" ABC \n REASON: X'A8'", + 
stderr="", + ), + _execution( + name=RMUTL_update_run_name(2), + rc=0, + stdout="", + stderr="", + ), + ] + global_catalog.MVSCmd.execute = MagicMock( + side_effect=[ + MVSCmdResponse(rc=16, stdout=" ABC \n REASON: X'A8'", stderr=""), + MVSCmdResponse(rc=0, stdout="", stderr=""), + ] + ) + global_catalog._get_rmutl_dds = MagicMock(return_value=[]) + result = global_catalog._run_dfhrmutl( + location="DATA.SET", sdfhload="SDFH.LOAD", cmd="HI" + ) + + assert result == executions + + +def test_global_catalog_run_rmutl_no_cmd(): + rmutl_response = MVSCmdResponse( + rc=0, + stdout=RMUTL_stdout("AUTOASIS", "EMERGENCY"), + stderr="", + ) + + expected_executions = [ + _execution( + name=RMUTL_get_run_name(1), + rc=rmutl_response.rc, + stdout=rmutl_response.stdout, + stderr=rmutl_response.stderr, + ) + ] + expected_details = ("AUTOASIS", "EMERGENCY") + global_catalog.MVSCmd.execute = MagicMock(return_value=rmutl_response) + global_catalog._get_rmutl_dds = MagicMock(return_value=[]) + actual_executions, actual_details = global_catalog._run_dfhrmutl( + location="DATA.SET", sdfhload="SDFH.LOAD" + ) + + assert actual_executions == expected_executions + assert actual_details == expected_details + + +def test_global_catalog_run_rmutl_no_cmd_with_failure(): + rmutl_response = MVSCmdResponse( + rc=0, + stdout=RMUTL_stdout("AUTOASIS", "EMERGENCY"), + stderr="", + ) + + expected_executions = [ + _execution( + name=RMUTL_get_run_name(1), + rc=16, + stdout=" ABC \n REASON: X'A8'", + stderr="", + ), + _execution( + name=RMUTL_get_run_name(2), + rc=rmutl_response.rc, + stdout=rmutl_response.stdout, + stderr=rmutl_response.stderr, + ), + ] + expected_details = ("AUTOASIS", "EMERGENCY") + global_catalog.MVSCmd.execute = MagicMock( + side_effect=[ + MVSCmdResponse(rc=16, stdout=" ABC \n REASON: X'A8'", stderr=""), + rmutl_response, + ] + ) + global_catalog._get_rmutl_dds = MagicMock(return_value=[]) + actual_executions, actual_details = global_catalog._run_dfhrmutl( + location="DATA.SET", 
sdfhload="SDFH.LOAD" + ) + + assert actual_executions == expected_executions + assert actual_details == expected_details + + +def test_global_catalog_run_rmutl_no_cmd_many_failures(): + rmutl_response = MVSCmdResponse( + rc=0, + stdout=RMUTL_stdout("AUTOINIT", "UNKNOWN"), + stderr="", + ) + + expected_executions = [ + _execution( + name=RMUTL_get_run_name(1), + rc=16, + stdout=" ABC \n REASON: X'A8'", + stderr="", + ), + _execution( + name=RMUTL_get_run_name(2), + rc=16, + stdout="\n\n\n REASON: X'A8'", + stderr="", + ), + _execution( + name=RMUTL_get_run_name(3), + rc=16, + stdout="REASON:X'A8'", + stderr="", + ), + _execution( + name=RMUTL_get_run_name(4), + rc=16, + stdout="\n REASON:X'A8'", + stderr="", + ), + _execution( + name=RMUTL_get_run_name(5), + rc=16, + stdout=" ABC \n REASON: X 'A8'", + stderr="", + ), + _execution( + name=RMUTL_get_run_name(6), + rc=rmutl_response.rc, + stdout=rmutl_response.stdout, + stderr=rmutl_response.stderr, + ), + ] + expected_details = ("AUTOINIT", "UNKNOWN") + global_catalog.MVSCmd.execute = MagicMock( + side_effect=[ + MVSCmdResponse(rc=16, stdout=" ABC \n REASON: X'A8'", stderr=""), + MVSCmdResponse(rc=16, stdout="\n\n\n REASON: X'A8'", stderr=""), + MVSCmdResponse(rc=16, stdout="REASON:X'A8'", stderr=""), + MVSCmdResponse(rc=16, stdout="\n REASON:X'A8'", stderr=""), + MVSCmdResponse(rc=16, stdout=" ABC \n REASON: X 'A8'", stderr=""), + rmutl_response, + ] + ) + global_catalog._get_rmutl_dds = MagicMock(return_value=[]) + actual_executions, actual_details = global_catalog._run_dfhrmutl( + location="DATA.SET", sdfhload="SDFH.LOAD" + ) + + assert actual_executions == expected_executions + assert actual_details == expected_details + + +def test_global_catalog_run_rmutl_rc16_error(): + global_catalog.MVSCmd.execute = MagicMock( + return_value=MVSCmdResponse(rc=16, stdout=" ABC \n REASON: X'12'", stderr="") + ) + global_catalog._get_rmutl_dds = MagicMock(return_value=[]) + + expected_executions = [ + _execution( + 
name=RMUTL_update_run_name(1), + rc=16, + stdout=" ABC \n REASON: X'12'", + stderr="" + ) + ] + + error_raised = False + try: + global_catalog._run_dfhrmutl( + location="DATA.SET", sdfhload="SDFH.LOAD", cmd="HI" + ) + except MVSExecutionException as e: + error_raised = True + assert e.message == "DFHRMUTL failed with RC 16 - REASON:X'12'" + assert e.executions == expected_executions + + assert error_raised is True + + +def test_global_catalog_run_rmutl_many_rc16_error(): + global_catalog.MVSCmd.execute = MagicMock( + side_effect=[ + MVSCmdResponse(rc=16, stdout=" ABC \n REASON: X'A8'", stderr=""), + MVSCmdResponse(rc=16, stdout="\n\n\n REASON: X'A8'", stderr=""), + MVSCmdResponse(rc=16, stdout="REASON:X'B2'", stderr=""), + ] + ) + global_catalog._get_rmutl_dds = MagicMock(return_value=[]) + + expected_executions = [ + _execution(name=RMUTL_update_run_name(1), rc=16, stdout=" ABC \n REASON: X'A8'", stderr=""), + _execution(name=RMUTL_update_run_name(2), rc=16, stdout="\n\n\n REASON: X'A8'", stderr=""), + _execution(name=RMUTL_update_run_name(3), rc=16, stdout="REASON:X'B2'", stderr=""), + ] + + error_raised = False + try: + global_catalog._run_dfhrmutl( + location="DATA.SET", sdfhload="SDFH.LOAD", cmd="HI" + ) + except MVSExecutionException as e: + error_raised = True + assert e.message == "DFHRMUTL failed with RC 16 - REASON:X'B2'" + assert e.executions == expected_executions + + assert error_raised is True + + +def test_global_catalog_run_rmutl_many_rc_error(): + global_catalog.MVSCmd.execute = MagicMock( + side_effect=[ + MVSCmdResponse(rc=16, stdout=" ABC \n REASON: X'A8'", stderr=""), + MVSCmdResponse(rc=16, stdout="\n\n\n REASON: X'A8'", stderr=""), + MVSCmdResponse(rc=15, stdout="REASON:X'A8'", stderr=""), + ] + ) + global_catalog._get_rmutl_dds = MagicMock(return_value=[]) + + expected_executions = [ + _execution(name=RMUTL_update_run_name(1), rc=16, stdout=" ABC \n REASON: X'A8'", stderr=""), + _execution(name=RMUTL_update_run_name(2), rc=16, stdout="\n\n\n 
REASON: X'A8'", stderr=""), + _execution(name=RMUTL_update_run_name(3), rc=15, stdout="REASON:X'A8'", stderr="") + ] + + error_raised = False + try: + global_catalog._run_dfhrmutl( + location="DATA.SET", sdfhload="SDFH.LOAD", cmd="HI" + ) + except MVSExecutionException as e: + error_raised = True + assert e.message == "DFHRMUTL failed with RC 15" + assert e.executions == expected_executions + + assert error_raised is True + + +def test_global_catalog_run_rmutl_rc_not_0(): + global_catalog.MVSCmd.execute = MagicMock( + return_value=MVSCmdResponse(rc=123, stdout="", stderr="") + ) + global_catalog._get_rmutl_dds = MagicMock(return_value=[]) + + expected_executions = [_execution(name=RMUTL_update_run_name(1), rc=123, stdout="", stderr="")] + + error_raised = False + try: + global_catalog._run_dfhrmutl( + location="DATA.SET", sdfhload="SDFH.LOAD", cmd="HI" + ) + except MVSExecutionException as e: + error_raised = True + assert e.message == "DFHRMUTL failed with RC 123" + assert e.executions == expected_executions + + assert error_raised is True diff --git a/tests/unit/module_utils/test_icetool.py b/tests/unit/module_utils/test_icetool.py new file mode 100644 index 00000000..beb2efb2 --- /dev/null +++ b/tests/unit/module_utils/test_icetool.py @@ -0,0 +1,99 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 
2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import absolute_import, division, print_function + +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._response import MVSExecutionException, _execution +from ansible_collections.ibm.ibm_zos_cics.tests.unit.helpers.data_set_helper import ICETOOL_name, ICETOOL_stderr, ICETOOL_stdout +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.zos_mvs_raw import MVSCmdResponse +__metaclass__ = type +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils import _icetool as icetool +import pytest +try: + from unittest.mock import MagicMock +except ImportError: + from mock import MagicMock + + +NAME = "TEST.REGIONS.LCD" + + +def test__get_record_count_with_invalid_stdout(): + record_count = icetool._get_record_count("Some invalid STDOUT") + expected = -1 + assert record_count == expected + + +def test__get_record_count_with_record_count_string(): + record_count = icetool._get_record_count("RECORD COUNT: 000000000000001") + expected = 1 + assert record_count == expected + + +def test__get_record_count_with_icetool_stdout(): + record_count = icetool._get_record_count(ICETOOL_stdout(52)) + expected = 52 + assert record_count == expected + + +def test__get_zero_record_count_with_icetool_stdout(): + record_count = icetool._get_record_count(ICETOOL_stdout(0)) + expected = 0 + assert record_count == expected + + +def test__run_icetool(): + icetool._execute_icetool = MagicMock(return_value=MVSCmdResponse(rc=0, stdout=ICETOOL_stdout(52), stderr=ICETOOL_stderr())) + executions, record_count = icetool._run_icetool(NAME) + expected_record_count = 52 + expected_executions = [ + _execution(name=ICETOOL_name(1), rc=0, stdout=ICETOOL_stdout(52), stderr=ICETOOL_stderr()), + ] + assert record_count == expected_record_count + assert executions == expected_executions + + +def test__run_icetool_rc_16_no_reason(): + icetool._execute_icetool = 
MagicMock(return_value=MVSCmdResponse(rc=16, stdout="", stderr=ICETOOL_stderr())) + with pytest.raises(MVSExecutionException) as e_info: + icetool._run_icetool(NAME) + + assert (e_info.value).message == "ICETOOL failed with RC 16" + + +def test__run_icetool_rc_nonzero(): + icetool._execute_icetool = MagicMock(return_value=MVSCmdResponse(rc=99, stdout="", stderr=ICETOOL_stderr())) + with pytest.raises(MVSExecutionException) as e_info: + icetool._run_icetool(NAME) + + assert (e_info.value).message == "ICETOOL failed with RC 99" + + +def test__run_icetool_with_no_zoau_response(): + rc = 0 + stdout = "" + stderr = "" + icetool._execute_icetool = MagicMock(return_value=MVSCmdResponse(rc=rc, stdout=stdout, stderr=stderr)) + + expected_executions = [ + _execution(name=ICETOOL_name(1), rc=rc, stdout=stdout, stderr=stderr), + _execution(name=ICETOOL_name(2), rc=rc, stdout=stdout, stderr=stderr), + _execution(name=ICETOOL_name(3), rc=rc, stdout=stdout, stderr=stderr), + _execution(name=ICETOOL_name(4), rc=rc, stdout=stdout, stderr=stderr), + _execution(name=ICETOOL_name(5), rc=rc, stdout=stdout, stderr=stderr), + _execution(name=ICETOOL_name(6), rc=rc, stdout=stdout, stderr=stderr), + _execution(name=ICETOOL_name(7), rc=rc, stdout=stdout, stderr=stderr), + _execution(name=ICETOOL_name(8), rc=rc, stdout=stdout, stderr=stderr), + _execution(name=ICETOOL_name(9), rc=rc, stdout=stdout, stderr=stderr), + _execution(name=ICETOOL_name(10), rc=rc, stdout=stdout, stderr=stderr), + ] + + try: + icetool._run_icetool(NAME) + except MVSExecutionException as e: + assert e.message == "ICETOOL Command output not recognised" + assert e.executions == expected_executions + else: + assert False diff --git a/tests/unit/module_utils/test_intrapartition.py b/tests/unit/module_utils/test_intrapartition.py new file mode 100644 index 00000000..6de4a8b1 --- /dev/null +++ b/tests/unit/module_utils/test_intrapartition.py @@ -0,0 +1,136 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 
2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import absolute_import, division, print_function +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._data_set import CYLINDERS, MEGABYTES + +from ansible_collections.ibm.ibm_zos_cics.tests.unit.helpers.data_set_helper import PYTHON_LANGUAGE_FEATURES_MESSAGE + +__metaclass__ = type +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils import _td_intrapartition as intrapartition +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils import _data_set_utils as data_set_utils +from ansible_collections.ibm.ibm_zos_cics.plugins.modules.td_intrapartition import SPACE_PRIMARY_DEFAULT, SPACE_SECONDARY_DEFAULT +import pytest +import sys + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_get_idcams_cmd_megabytes(): + dataset = dict( + name="ANSI.CYLS.DFHINTRA", + state="initial", + exists=False, + data_set_organization="NONE", + unit=MEGABYTES, + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT + ) + idcams_cmd_intra = data_set_utils._build_idcams_define_cmd( + intrapartition._get_idcams_cmd_intra(dataset) + ) + assert ( + idcams_cmd_intra + == """ + DEFINE CLUSTER (NAME(ANSI.CYLS.DFHINTRA) - + MEGABYTES(100 10) - + RECORDSIZE(1529 1529) - + NONINDEXED - + CONTROLINTERVALSIZE(1536)) - + DATA (NAME(ANSI.CYLS.DFHINTRA.DATA)) + """ + ) + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_get_idcams_cmd_cylinders(): + dataset = dict( + name="ANSI.CYLS.DFHINTRA", + state="initial", + exists=False, + data_set_organization="NONE", + unit=CYLINDERS, + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT + ) + idcams_cmd_intra = data_set_utils._build_idcams_define_cmd( + intrapartition._get_idcams_cmd_intra(dataset) + ) + assert ( + idcams_cmd_intra + == """ + DEFINE CLUSTER (NAME(ANSI.CYLS.DFHINTRA) - + 
CYLINDERS(100 10) - + RECORDSIZE(1529 1529) - + NONINDEXED - + CONTROLINTERVALSIZE(1536)) - + DATA (NAME(ANSI.CYLS.DFHINTRA.DATA)) + """ + ) + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_get_idcams_cmd_volumes(): + dataset = dict( + name="ANSI.CYLS.DFHINTRA", + state="initial", + exists=False, + data_set_organization="NONE", + unit=MEGABYTES, + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT, + volumes=["vserv1"] + ) + idcams_cmd_intra = data_set_utils._build_idcams_define_cmd( + intrapartition._get_idcams_cmd_intra(dataset) + ) + assert ( + idcams_cmd_intra + == """ + DEFINE CLUSTER (NAME(ANSI.CYLS.DFHINTRA) - + MEGABYTES(100 10) - + RECORDSIZE(1529 1529) - + NONINDEXED - + CONTROLINTERVALSIZE(1536) - + VOLUMES(vserv1)) - + DATA (NAME(ANSI.CYLS.DFHINTRA.DATA)) + """ + ) + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_get_idcams_cmd_multiple_volumes(): + dataset = dict( + name="ANSI.CYLS.DFHINTRA", + state="initial", + exists=False, + data_set_organization="NONE", + unit=MEGABYTES, + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT, + volumes=["vserv1", "vserv2"] + ) + idcams_cmd_intra = data_set_utils._build_idcams_define_cmd( + intrapartition._get_idcams_cmd_intra(dataset) + ) + assert ( + idcams_cmd_intra + == """ + DEFINE CLUSTER (NAME(ANSI.CYLS.DFHINTRA) - + MEGABYTES(100 10) - + RECORDSIZE(1529 1529) - + NONINDEXED - + CONTROLINTERVALSIZE(1536) - + VOLUMES(vserv1 vserv2)) - + DATA (NAME(ANSI.CYLS.DFHINTRA.DATA)) + """ + ) diff --git a/tests/unit/module_utils/test_jcl_helper.py b/tests/unit/module_utils/test_jcl_helper.py new file mode 100644 index 00000000..e901d0a6 --- /dev/null +++ b/tests/unit/module_utils/test_jcl_helper.py @@ -0,0 +1,507 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 
2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._jcl_helper import ( + JCLHelper, JCL_PREFIX, JOB_CARD, EXECS +) +import pytest +import sys + + +@pytest.mark.skipif(sys.version_info.major < 3, reason="Requires python 3 language features") +def test_render_jcl(): + jcl_helper = JCLHelper() + jcl_helper.job_data = { + JOB_CARD: {"job_name": "TESTJOB"}, + EXECS: [{"name": "CICS", "pgm": "TESTPRG", "parm": "SI", + "dds": [{"COUT": [{"disp": "SHR", "dsn": "DATA.SET.NAME"}]}]}] + } + expected_jcl = ['//TESTJOB JOB', + '//CICS EXEC PGM=TESTPRG,PARM=SI', + '//COUT DD DISP=SHR,DSN=DATA.SET.NAME', + '//'] + + jcl_helper.render_jcl() + assert jcl_helper.jcl == expected_jcl + + +@pytest.mark.skipif(sys.version_info.major < 3, reason="Requires python 3 language features") +def test_write_job_statement(): + jcl_helper = JCLHelper() + job_name = "TESTJOB" + job_parameters = {"job_name": job_name, + "class": "A", + "user": "BOBSMITH", + "region": "0M"} + + expected_jcl = JCL_PREFIX + job_name + \ + " JOB CLASS=A,USER=BOBSMITH,REGION=0M" + jcl_helper._write_job_statement(job_parameters) + assert jcl_helper.jcl == [expected_jcl] + assert len(jcl_helper.jcl) == 1 + + +@pytest.mark.skipif(sys.version_info.major < 3, reason="Requires python 3 language features") +def test_write_long_job_statement(): + jcl_helper = JCLHelper() + job_name = "TESTJOB" + job_parameters = {"job_name": job_name, + "class": "A", + "user": "KIERA", + "region": "0M", + "msgclass": "A", + "memlimit": "10G", + "additional_parameter": "TESTONE", + "additional_parameter1": "TESTTWO"} + + expected_jcl = [JCL_PREFIX + job_name + " JOB CLASS=A,USER=KIERA,REGION=0M,MSGCLASS=A,MEMLIMIT=10G,", + '// ADDITIONAL_PARAMETER=TESTONE,ADDITIONAL_PARAMETER1=TESTTWO'] + jcl_helper._write_job_statement(job_parameters) + assert jcl_helper.jcl == expected_jcl + assert len(jcl_helper.jcl) == 2 + + +def test_write_dds(): + jcl_helper 
= JCLHelper() + dds = [{"COUT": [{"disp": "SHR", "dsn": "DATA.SET.NAME"}]}] + jcl_helper._write_dds(dds) + assert jcl_helper.jcl == [ + "//COUT DD DISP=SHR,DSN=DATA.SET.NAME"] + + +def test_write_dds_with_instream(): + jcl_helper = JCLHelper() + dds = [{"COUT": [{"disp": "SHR", "dsn": "DATA.SET.NAME"}]}, + {"INPUT": {"content": ['INSTREAM DATA']}}] + jcl_helper._write_dds(dds) + assert jcl_helper.jcl == ["//COUT DD DISP=SHR,DSN=DATA.SET.NAME", + "//INPUT DD *", + "INSTREAM DATA", + "/*"] + + +def test_write_dds_with_concatenation(): + jcl_helper = JCLHelper() + dds = [{"COUT": [{"disp": "SHR", "dsn": "DATA.SET.NAME"}]}, + {"CONCAT": [{"disp": "OLD", "dsn": "DATA.SET.NAME1"}, + {"disp": "SHR", "dsn": "DATA.SET.NAME2"}]}] + jcl_helper._write_dds(dds) + assert jcl_helper.jcl == ["//COUT DD DISP=SHR,DSN=DATA.SET.NAME", + "//CONCAT DD DISP=OLD,DSN=DATA.SET.NAME1", + "// DD DISP=SHR,DSN=DATA.SET.NAME2"] + + +@pytest.mark.skipif(sys.version_info.major < 3, reason="Requires python 3 language features") +def test_write_exec_statement(): + jcl_helper = JCLHelper() + expected_jcl = [ + "//CICS EXEC PGM=DFHSIP,PARM=SI,REGION=0M,TIME=1440"] + jcl_helper._write_exec_statements([{"name": "CICS", "pgm": "DFHSIP", "parm": "SI", "region": "0M", + "time": 1440, "dds": {}}]) + assert jcl_helper.jcl == expected_jcl + + +@pytest.mark.skipif(sys.version_info.major < 3, reason="Requires python 3 language features") +def test_write_exec_statement_with_dds(): + jcl_helper = JCLHelper() + expected_jcl = ["//CICS EXEC PGM=DFHSIP,PARM=SI,REGION=0M,TIME=1440", + "//COUT DD DISP=SHR,DSN=DATA.SET.NAME"] + jcl_helper._write_exec_statements([{"name": "CICS", "pgm": "DFHSIP", "parm": "SI", "region": "0M", "time": 1440, + "dds": [{"COUT": [{"disp": "SHR", "dsn": "DATA.SET.NAME"}]}]}]) + assert jcl_helper.jcl == expected_jcl + + +@pytest.mark.skipif(sys.version_info.major < 3, reason="Requires python 3 language features") +def test_build_exec_statement_string(): + expected_jcl = "//CICS EXEC 
PGM=DFHSIP,PARM=SI,REGION=0M,TIME=1440" + exec_dict = {"name": "CICS", "pgm": "DFHSIP", "parm": "SI", "region": "0M", "time": 1440} + assert JCLHelper._build_exec_statement_string( + exec_dict) == expected_jcl + + +def test_build_exec_statement_string_with_no_parameters(): + expected_jcl = "//CICS EXEC" + exec_dict = {"name": "CICS"} + assert JCLHelper._build_exec_statement_string( + exec_dict) == expected_jcl + + +def test_write_list_of_strings(): + jcl_helper = JCLHelper() + expected_jcl = ["//TEST DD DISP=SHR,PARAM=TEST", + "//TEST2 DD DISP=SHR,PARAM=TEST2"] + jcl_helper._write_list_of_strings(expected_jcl) + assert jcl_helper.jcl == expected_jcl + + +def test_write_list_of_strings_pass_a_string(): + jcl_helper = JCLHelper() + expected_jcl = "//TEST DD DISP=SHR,PARAM=TEST" + jcl_helper._write_list_of_strings(expected_jcl) + assert jcl_helper.jcl == [expected_jcl] + + +def test_write_instream_data(): + jcl_helper = JCLHelper() + jcl_helper._write_instream_data("COUT", {"content": ["FIRST=ONE", "SECOND=TWO"]}) + expected_statement = ["//COUT DD *", "FIRST=ONE", "SECOND=TWO", "/*"] + assert jcl_helper.jcl == expected_statement + + +@pytest.mark.skipif(sys.version_info.major < 3, reason="Requires python 3 language features") +def test_write_instream_data_with_additional_parameters(): + jcl_helper = JCLHelper() + jcl_helper._write_instream_data( + "COUT", {"content": ["FIRST=ONE", "SECOND=TWO"], "DLM": "@@", "PARAM2": "TWO"}) + expected_statement = [ + "//COUT DD *,DLM=@@,PARAM2=TWO", "FIRST=ONE", "SECOND=TWO", "/*"] + assert jcl_helper.jcl == expected_statement + + +def test_write_dd_statement(): + jcl_helper = JCLHelper() + jcl_helper._write_dd_statement( + "COUT", {"disp": "SHR", "dsn": "DATA.SET.NAME"}) + expected_statement = ["//COUT DD DISP=SHR,DSN=DATA.SET.NAME"] + assert jcl_helper.jcl == expected_statement + + +def test_write_dd_concatenation(): + jcl_helper = JCLHelper() + jcl_helper._write_dd_concatenation("SYSIN", [{"disp": "SHR", "dsn": "DATA.ONE"}, + 
{"disp": "SHR", "dsn": "DATA.TWO"}, + {"disp": "SHR", "dsn": "DATA.THREE"}]) + expected_statement = ["//SYSIN DD DISP=SHR,DSN=DATA.ONE", + "// DD DISP=SHR,DSN=DATA.TWO", + "// DD DISP=SHR,DSN=DATA.THREE"] + + assert jcl_helper.jcl == expected_statement + + +@pytest.mark.skipif(sys.version_info.major < 3, reason="Requires python 3 language features") +def test_write_dd_concatenation_with_long_line(): + jcl_helper = JCLHelper() + jcl_helper._write_dd_concatenation("SYSIN", [{"disp": "SHR", "dsn": "DATA.ONE", "ONE": "FIRSTVAL", + "TWO": "SECONDVAL", "THIRD": "THIRDVAL", "FOUR": "FOURTHVAL"}, + {"disp": "SHR", "dsn": "DATA.TWO"}, + {"disp": "SHR", "dsn": "DATA.THREE"}]) + expected_statement = ["//SYSIN DD DISP=SHR,DSN=DATA.ONE,ONE=FIRSTVAL,TWO=SECONDVAL,", + "// THIRD=THIRDVAL,FOUR=FOURTHVAL", + "// DD DISP=SHR,DSN=DATA.TWO", + "// DD DISP=SHR,DSN=DATA.THREE"] + + assert jcl_helper.jcl == expected_statement + + +def test_write_null_statement(): + jcl_helper = JCLHelper() + jcl_helper._write_null_statement() + expected_statement = ["//"] + assert jcl_helper.jcl == expected_statement + + +@pytest.mark.skipif(sys.version_info.major < 3, reason="Requires python 3 language features") +def test_build_job_statement(): + job_parameters = {"job_name": "JOB123", + "class": "A", + "user": "BOBSMITH", + "region": "0M"} + expected_statement = JCL_PREFIX + "JOB123 JOB CLASS=A,USER=BOBSMITH,REGION=0M" + assert JCLHelper._build_job_statement( + job_parameters) == expected_statement + + +@pytest.mark.skipif(sys.version_info.major < 3, reason="Requires python 3 language features") +def test_build_job_statement_with_accounting_info(): + job_parameters = {"job_name": "JOB123", + "accounting_information": {"pano": "ABCD"}, + "class": "A", + "user": "BOBSMITH", + "region": "0M"} + expected_statement = JCL_PREFIX + "JOB123 JOB ABCD,CLASS=A,USER=BOBSMITH,REGION=0M" + assert JCLHelper._build_job_statement( + job_parameters) == expected_statement + + +@pytest.mark.skipif(sys.version_info.major 
< 3, reason="Requires python 3 language features") +def test_build_job_statement_with_programmer_name(): + job_parameters = {"job_name": "JOB123", + "programmer_name": "USER", + "class": "A", + "user": "BOBSMITH", + "region": "0M"} + expected_statement = JCL_PREFIX + "JOB123 JOB ,'USER',CLASS=A,USER=BOBSMITH,REGION=0M" + assert JCLHelper._build_job_statement( + job_parameters) == expected_statement + + +@pytest.mark.skipif(sys.version_info.major < 3, reason="Requires python 3 language features") +def test_build_job_statement_with_programmer_name_and_account_info(): + job_parameters = {"job_name": "JOB123", + "accounting_information": {"pano": "ABCD"}, + "programmer_name": "USER", + "class": "A", + "user": "BOBSMITH", + "region": "0M"} + expected_statement = JCL_PREFIX + "JOB123 JOB ABCD,'USER',CLASS=A,USER=BOBSMITH,REGION=0M" + assert JCLHelper._build_job_statement( + job_parameters) == expected_statement + + +@pytest.mark.skipif(sys.version_info.major < 3, reason="Requires python 3 language features") +def test_build_job_statement_with_msglevel_parameter_statements(): + job_parameters = {"job_name": "JOB123", + "programmer_name": "USER", + "class": "A", + "msglevel": {"statements": 0}, + "user": "BOBSMITH", + "region": "0M"} + expected_statement = JCL_PREFIX + "JOB123 JOB ,'USER',CLASS=A,MSGLEVEL=0,USER=BOBSMITH,REGION=0M" + assert JCLHelper._build_job_statement( + job_parameters) == expected_statement + + +@pytest.mark.skipif(sys.version_info.major < 3, reason="Requires python 3 language features") +def test_build_job_statement_with_msglevel_parameter_messages(): + job_parameters = {"job_name": "JOB123", + "programmer_name": "USER", + "class": "A", + "msglevel": {"messages": 1}, + "user": "BOBSMITH", + "region": "0M"} + expected_statement = JCL_PREFIX + "JOB123 JOB ,'USER',CLASS=A,MSGLEVEL=(,1),USER=BOBSMITH,REGION=0M" + assert JCLHelper._build_job_statement( + job_parameters) == expected_statement + + +@pytest.mark.skipif(sys.version_info.major < 3, 
reason="Requires python 3 language features") +def test_build_job_statement_with_msglevel_parameter(): + job_parameters = {"job_name": "JOB123", + "programmer_name": "USER", + "class": "A", + "msglevel": {"statements": 1, "messages": 1}, + "user": "BOBSMITH", + "region": "0M"} + expected_statement = JCL_PREFIX + "JOB123 JOB ,'USER',CLASS=A,MSGLEVEL=(1,1),USER=BOBSMITH,REGION=0M" + assert JCLHelper._build_job_statement( + job_parameters) == expected_statement + + +def test_format_job_positional_parameters_two_accounting_infomation_values(): + jcl_helper = JCLHelper() + job_parameters = {"accounting_information": {"pano": "ABCD", "times": 12}, "programmer_name": "USER"} + expected_statement = "(ABCD,,12),USER" + assert jcl_helper._format_job_positional_parameters(job_parameters) == expected_statement + + +def test_format_job_positional_parameters_accounting_information_only(): + jcl_helper = JCLHelper() + job_parameters = {"accounting_information": {"pano": "ABCD"}} + expected_statement = "ABCD" + assert jcl_helper._format_job_positional_parameters(job_parameters) == expected_statement + + +def test_format_job_positional_parameters_two_accounting_infomation_values(): + jcl_helper = JCLHelper() + job_parameters = {"accounting_information": {"pano": "ABCD", "times": 12}} + expected_statement = "(ABCD,,12)" + assert jcl_helper._format_job_positional_parameters(job_parameters) == expected_statement + + +def test_format_job_positional_parameters_only_programmer_name(): + jcl_helper = JCLHelper() + job_parameters = {"programmer_name": "USER"} + expected_statement = ",'USER'" + assert jcl_helper._format_job_positional_parameters(job_parameters) == expected_statement + + +def test_format_job_positional_parameters_positional_info_not_set(): + jcl_helper = JCLHelper() + job_parameters = {"job_name": "JOB123"} + expected_statement = None + assert jcl_helper._format_job_positional_parameters(job_parameters) == expected_statement + + +def 
test_format_job_positional_parameters_job_parameters_none(): + jcl_helper = JCLHelper() + expected_statement = None + assert jcl_helper._format_job_positional_parameters(None) == expected_statement + + +def test_format_accounting_information(): + accounting_information = {"pano": "AB12", "room": "ROOM", "times": "TIME", "lines": 23} + formatted_information = JCLHelper._format_accounting_information(accounting_information) + assert formatted_information == "(AB12,ROOM,TIME,23)" + + +def test_format_accounting_information_with_missing_values(): + accounting_information = {"pano": "AB12", "times": "TIME", "lines": 23} + formatted_information = JCLHelper._format_accounting_information(accounting_information) + assert formatted_information == "(AB12,,TIME,23)" + + +def test_format_accounting_information_with_missing_value_2(): + accounting_information = {"room": "ABC", "times": "TIME", "lines": 23} + formatted_information = JCLHelper._format_accounting_information(accounting_information) + assert formatted_information == "(,ABC,TIME,23)" + + +def test_format_accounting_information_with_missing_values_3(): + accounting_information = {"pano": "EDFG", "lines": 23, "cards": "CARD"} + formatted_information = JCLHelper._format_accounting_information(accounting_information) + assert formatted_information == "(EDFG,,,23,CARD)" + + +def test_format_accounting_information_with_one_value(): + accounting_information = {"pano": "EDFG"} + formatted_information = JCLHelper._format_accounting_information(accounting_information) + assert formatted_information == "EDFG" + + +def test_format_programmer_name_no_apostrophes(): + name = "USER" + formatted = JCLHelper._format_programmer_name(name) + assert formatted == "'USER'" + + +def test_format_programmer_name_with_apostrophes(): + name = "USER'NAME" + formatted = JCLHelper._format_programmer_name(name) + assert formatted == "'USER''NAME'" + + +def test_format_msglevel_parameter_both_parameters(): + msglevel_dict = {"statements": 1, 
"messages": 0} + msglevel_dict_formatted = JCLHelper._format_msglevel_parameter(msglevel_dict) + assert msglevel_dict_formatted == "(1,0)" + + +def test_format_msglevel_parameter_statements_only(): + msglevel_dict = {"statements": 1} + msglevel_dict_formatted = JCLHelper._format_msglevel_parameter(msglevel_dict) + assert msglevel_dict_formatted == 1 + + +def test_format_msglevel_parameter_messages_only(): + msglevel_dict = {"messages": 1} + msglevel_dict_formatted = JCLHelper._format_msglevel_parameter(msglevel_dict) + assert msglevel_dict_formatted == "(,1)" + + +def test_build_dd_statement(): + jcl_helper = JCLHelper() + expected_statement = "//COUT DD DISP=SHR,DSN=DATA.SET.NAME" + parameters = {"disp": "SHR", "dsn": "DATA.SET.NAME"} + assert jcl_helper._build_dd_statement("COUT", parameters) == expected_statement + + +def test_build_dd_statement_with_no_dd_name(): + jcl_helper = JCLHelper() + expected_statement = None + parameters = {"disp": "SHR", "dsn": "DATA.SET.NAME"} + assert jcl_helper._build_dd_statement(None, parameters) == expected_statement + + +def test_build_dd_concatenation_list(): + jcl_helper = JCLHelper() + job_name = "DFHTEST" + list_of_dicts = [{'disp': 'SHR', 'dsn': 'TEST.CICS.PRINT'}, + {'disp': 'OLD', 'dsn': 'TEST.CICS.PRINT2'}] + + expected = ['//DFHTEST DD DISP=SHR,DSN=TEST.CICS.PRINT', + '// DD DISP=OLD,DSN=TEST.CICS.PRINT2'] + assert jcl_helper._build_dd_concatenation_list( + job_name, list_of_dicts) == expected + + +def test_build_dd_concatenation_list_length_one(): + jcl_helper = JCLHelper() + job_name = "DFHTEST" + list_of_dicts = [{'disp': 'SHR', 'dsn': 'TEST.CICS.PRINT'}] + + expected = ['//DFHTEST DD DISP=SHR,DSN=TEST.CICS.PRINT'] + assert jcl_helper._build_dd_concatenation_list( + job_name, list_of_dicts) == expected + + +def test_format_dd_name(): + assert JCLHelper._format_dd_name("TEST") == "TEST " + + +def test_exceeds_line_length_short(): + dd_statement = "// This is a short statement" + assert 
JCLHelper._exceeds_line_length(dd_statement) is False + + +def test_exceeds_line_length_long(): + dd_statement = "// This is a longer test comment which is over the current specified max limit" + assert JCLHelper._exceeds_line_length(dd_statement) is True + + +def test_split_too_long_dd_statement_list(): + long_dd_statement = ["//TOOLONG DD DISP=SHR,DSN=TOOLONGNAME,PARM1=ONE,PARM2=TWO,PARM3=THREE,PARM4=FOUR", + "//THIS DD ONE=SHORT"] + split_statement = JCLHelper._split_long_dd_statement_list(long_dd_statement) + assert split_statement == ["//TOOLONG DD DISP=SHR,DSN=TOOLONGNAME,PARM1=ONE,PARM2=TWO,PARM3=THREE,", + "// PARM4=FOUR", + "//THIS DD ONE=SHORT"] + + +def test_split_too_long_dd_statement_list_not_a_list(): + long_dd_statement = "//TOOLONG DD DISP=SHR,DSN=TOOLONGNAME,PARM1=ONE,PARM2=TWO,PARM3=THREE,PARM4=FOUR" + split_statement = JCLHelper._split_long_dd_statement_list(long_dd_statement) + assert split_statement == ["//TOOLONG DD DISP=SHR,DSN=TOOLONGNAME,PARM1=ONE,PARM2=TWO,PARM3=THREE,", + "// PARM4=FOUR"] + + +def test_split_too_long_dd_statement_list_but_not_long(): + dd_statement_list = ["//NOTLONG DD DISP=SHR,DSN=TOOLONGNAME,PARM1=ONE", + "//THIS DD ONE=SHORT"] + split_statement = JCLHelper._split_long_dd_statement_list(dd_statement_list) + assert split_statement == ["//NOTLONG DD DISP=SHR,DSN=TOOLONGNAME,PARM1=ONE", + "//THIS DD ONE=SHORT"] + + +def test_split_too_long_dd_statement(): + long_dd_statement = "//TOOLONG DD DISP=SHR,DSN=TOOLONGNAME,PARM1=ONE,PARM2=TWO,PARM3=THREE,PARM4=FOUR" + split_statement = JCLHelper._split_long_dd_statement(long_dd_statement) + assert split_statement == ["//TOOLONG DD DISP=SHR,DSN=TOOLONGNAME,PARM1=ONE,PARM2=TWO,PARM3=THREE,", + "// PARM4=FOUR"] + + +def test_add_parameters_onto_existing_dd_statement(): + existing_dd_statement = "//TEST DD NEED SOME PARAMETERS" + parameter_list = ["PARAM1=ONE", "PARAM2=TWO"] + expected = "//TEST DD NEED SOME PARAMETERS,PARAM1=ONE,PARAM2=TWO" + assert 
JCLHelper._add_parameters_onto_dd_statement( + existing_dd_statement, parameter_list, True) == expected + + +def test_build_parameter_string(): + parameter_list = ["TEST1=PARM1", "TEST2=PARM2", "TEST3=PARM3"] + assert JCLHelper._build_parameter_string( + parameter_list) == "TEST1=PARM1,TEST2=PARM2,TEST3=PARM3" + + +def test_build_parameter_string_when_none(): + parameter_list = None + assert JCLHelper._build_parameter_string(parameter_list) == "" + + +@pytest.mark.skipif(sys.version_info.major < 3, reason="Requires python 3 language features") +def test_concatenate_key_value_pairs_into_list(): + dictionary = {"Val1": "One", "Val2": "Two", "Val3": "Three", "Val4": "Four"} + dictionary_unpacked = JCLHelper._concatenate_key_value_pairs_into_list( + dictionary) + assert dictionary_unpacked == ["VAL1=One", "VAL2=Two", "VAL3=Three", "VAL4=Four"] + + +def test_add_single_quotes_to_text(): + assert JCLHelper._add_single_quotes_to_text("'hello'") == "'hello'" + assert JCLHelper._add_single_quotes_to_text("hello") == "'hello'" + assert JCLHelper._add_single_quotes_to_text("\"'hello'\"") == "\"'hello'\"" + assert JCLHelper._add_single_quotes_to_text("\"hel'lo\"") == "'hel''lo'" + assert JCLHelper._add_single_quotes_to_text("hel'lo") == "'hel''lo'" + assert JCLHelper._add_single_quotes_to_text("h'e'l'l'o") == "'h''e''l''l''o'" diff --git a/tests/unit/module_utils/test_local_catalog.py b/tests/unit/module_utils/test_local_catalog.py new file mode 100644 index 00000000..6b7b8906 --- /dev/null +++ b/tests/unit/module_utils/test_local_catalog.py @@ -0,0 +1,205 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 
2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import absolute_import, division, print_function +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._data_set import CYLINDERS, MEGABYTES +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._response import MVSExecutionException, _execution +from ansible_collections.ibm.ibm_zos_cics.tests.unit.helpers.data_set_helper import PYTHON_LANGUAGE_FEATURES_MESSAGE, CCUTL_name, CCUTL_stderr + +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.zos_mvs_raw import MVSCmdResponse +__metaclass__ = type +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils import _local_catalog as local_catalog_utils +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils import _data_set_utils as data_set_utils +from ansible_collections.ibm.ibm_zos_cics.plugins.modules.local_catalog import SPACE_PRIMARY_DEFAULT, SPACE_SECONDARY_DEFAULT +import pytest +import sys + +try: + from unittest.mock import MagicMock +except ImportError: + from mock import MagicMock + + +NAME = "ANSI.TEST.DFHLCD" + + +@pytest.mark.skipif(sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE) +def test_get_idcams_cmd_megabytes(): + catalog = dict( + name=NAME, + sdfhload="CICSTS.IN56.SDFHLOAD", + state="initial", + exists=False, + data_set_organization="NONE", + unit=MEGABYTES, + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT + ) + idcams_cmd_lcd = data_set_utils._build_idcams_define_cmd(local_catalog_utils._get_idcams_cmd_lcd(catalog)) + assert idcams_cmd_lcd == ''' + DEFINE CLUSTER (NAME(ANSI.TEST.DFHLCD) - + MEGABYTES(200 5) - + RECORDSIZE(70 2041) - + INDEXED - + KEYS(52 0) - + FREESPACE(10 10) - + SHAREOPTIONS(2) - + REUSE) - + DATA (NAME(ANSI.TEST.DFHLCD.DATA) - + CONTROLINTERVALSIZE(2048)) - + INDEX (NAME(ANSI.TEST.DFHLCD.INDEX)) + ''' + + +@pytest.mark.skipif(sys.version_info.major < 3, 
reason=PYTHON_LANGUAGE_FEATURES_MESSAGE) +def test_get_idcams_cmd_cylinders(): + catalog = dict( + name=NAME, + sdfhload="CICSTS.IN56.SDFHLOAD", + state="initial", + exists=False, + data_set_organization="NONE", + unit=CYLINDERS, + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT + ) + idcams_cmd_lcd = data_set_utils._build_idcams_define_cmd(local_catalog_utils._get_idcams_cmd_lcd(catalog)) + assert idcams_cmd_lcd == ''' + DEFINE CLUSTER (NAME(ANSI.TEST.DFHLCD) - + CYLINDERS(200 5) - + RECORDSIZE(70 2041) - + INDEXED - + KEYS(52 0) - + FREESPACE(10 10) - + SHAREOPTIONS(2) - + REUSE) - + DATA (NAME(ANSI.TEST.DFHLCD.DATA) - + CONTROLINTERVALSIZE(2048)) - + INDEX (NAME(ANSI.TEST.DFHLCD.INDEX)) + ''' + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_get_idcams_cmd_volumes(): + catalog = dict( + name=NAME, + sdfhload="CICSTS.IN56.SDFHLOAD", + state="initial", + exists=False, + data_set_organization="NONE", + unit=CYLINDERS, + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT, + volumes=["vserv1"] + ) + idcams_cmd_lcd = data_set_utils._build_idcams_define_cmd(local_catalog_utils._get_idcams_cmd_lcd(catalog)) + assert idcams_cmd_lcd == ''' + DEFINE CLUSTER (NAME(ANSI.TEST.DFHLCD) - + CYLINDERS(200 5) - + RECORDSIZE(70 2041) - + INDEXED - + KEYS(52 0) - + FREESPACE(10 10) - + SHAREOPTIONS(2) - + REUSE - + VOLUMES(vserv1)) - + DATA (NAME(ANSI.TEST.DFHLCD.DATA) - + CONTROLINTERVALSIZE(2048)) - + INDEX (NAME(ANSI.TEST.DFHLCD.INDEX)) + ''' + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_get_idcams_cmd_multiple_volumes(): + catalog = dict( + name=NAME, + sdfhload="CICSTS.IN56.SDFHLOAD", + state="initial", + exists=False, + data_set_organization="NONE", + unit=CYLINDERS, + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT, + volumes=["vserv1", "vserv2"] + ) + idcams_cmd_lcd = 
data_set_utils._build_idcams_define_cmd(local_catalog_utils._get_idcams_cmd_lcd(catalog)) + assert idcams_cmd_lcd == ''' + DEFINE CLUSTER (NAME(ANSI.TEST.DFHLCD) - + CYLINDERS(200 5) - + RECORDSIZE(70 2041) - + INDEXED - + KEYS(52 0) - + FREESPACE(10 10) - + SHAREOPTIONS(2) - + REUSE - + VOLUMES(vserv1 vserv2)) - + DATA (NAME(ANSI.TEST.DFHLCD.DATA) - + CONTROLINTERVALSIZE(2048)) - + INDEX (NAME(ANSI.TEST.DFHLCD.INDEX)) + ''' + + +def test_ccutl_response(): + local_catalog = { + "exists": False, + "name": NAME, + "size": { + "primary": SPACE_PRIMARY_DEFAULT, + "secondary": SPACE_SECONDARY_DEFAULT, + "unit": MEGABYTES + }, + "state": "initial", + "vsam": False, + "sdfhload": "CICSTS.IN56.SDFHLOAD", + } + + expected_executions = [ + _execution(name=CCUTL_name(), rc=0, stdout="stdout", stderr=CCUTL_stderr(NAME)), + ] + + local_catalog_utils._execute_dfhccutl = MagicMock( + return_value=MVSCmdResponse(rc=0, stdout="stdout", stderr=CCUTL_stderr(NAME)) + ) + executions = local_catalog_utils._run_dfhccutl(local_catalog) + + assert executions == expected_executions + + +def test_bad_ccutl_response(): + local_catalog = { + "exists": False, + "name": NAME, + "size": { + "primary": SPACE_PRIMARY_DEFAULT, + "secondary": SPACE_SECONDARY_DEFAULT, + "unit": MEGABYTES + }, + "state": "initial", + "vsam": False, + "sdfhload": "CICSTS.IN56.SDFHLOAD", + } + + expected_executions = [ + _execution(name=CCUTL_name(), rc=99, stdout="stdout", stderr=CCUTL_stderr(NAME)), + ] + + local_catalog_utils._execute_dfhccutl = MagicMock( + return_value=MVSCmdResponse(rc=99, stdout="stdout", stderr=CCUTL_stderr(NAME)) + ) + + try: + local_catalog_utils._run_dfhccutl(local_catalog) + except MVSExecutionException as e: + error_message = e.message + executions = e.executions + + assert error_message == "DFHCCUTL failed with RC 99" + assert executions == expected_executions + else: + assert False diff --git a/tests/unit/module_utils/test_local_request_queue.py 
b/tests/unit/module_utils/test_local_request_queue.py new file mode 100644 index 00000000..3fded677 --- /dev/null +++ b/tests/unit/module_utils/test_local_request_queue.py @@ -0,0 +1,135 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import absolute_import, division, print_function +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._data_set import CYLINDERS, MEGABYTES + +from ansible_collections.ibm.ibm_zos_cics.tests.unit.helpers.data_set_helper import PYTHON_LANGUAGE_FEATURES_MESSAGE +__metaclass__ = type +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils import _local_request_queue as local_request_queue +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils import _data_set_utils as data_set_utils +from ansible_collections.ibm.ibm_zos_cics.plugins.modules.local_request_queue import SPACE_PRIMARY_DEFAULT, SPACE_SECONDARY_DEFAULT +import pytest +import sys + + +@pytest.mark.skipif(sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE) +def test_get_idcams_cmd_megabytes(): + dataset = dict( + name="ANSI.CYLS.DFHLRQ", + state="initial", + exists=False, + data_set_organization="NONE", + unit=MEGABYTES, + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT + ) + idcams_cmd_lrq = data_set_utils._build_idcams_define_cmd(local_request_queue._get_idcams_cmd_lrq(dataset)) + assert idcams_cmd_lrq == ''' + DEFINE CLUSTER (NAME(ANSI.CYLS.DFHLRQ) - + MEGABYTES(4 1) - + RECORDSIZE(2232 2400) - + INDEXED - + KEYS(40 0) - + FREESPACE(0 10) - + SHAREOPTIONS(2 3) - + REUSE - + LOG(UNDO)) - + DATA (NAME(ANSI.CYLS.DFHLRQ.DATA) - + CONTROLINTERVALSIZE(2560)) - + INDEX (NAME(ANSI.CYLS.DFHLRQ.INDEX)) + ''' + + +@pytest.mark.skipif(sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE) +def test_get_idcams_cmd_cylinders(): + dataset = dict( + name="ANSI.CYLS.DFHLRQ", + state="initial", + 
exists=False, + data_set_organization="NONE", + unit=CYLINDERS, + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT + ) + idcams_cmd_lrq = data_set_utils._build_idcams_define_cmd(local_request_queue._get_idcams_cmd_lrq(dataset)) + assert idcams_cmd_lrq == ''' + DEFINE CLUSTER (NAME(ANSI.CYLS.DFHLRQ) - + CYLINDERS(4 1) - + RECORDSIZE(2232 2400) - + INDEXED - + KEYS(40 0) - + FREESPACE(0 10) - + SHAREOPTIONS(2 3) - + REUSE - + LOG(UNDO)) - + DATA (NAME(ANSI.CYLS.DFHLRQ.DATA) - + CONTROLINTERVALSIZE(2560)) - + INDEX (NAME(ANSI.CYLS.DFHLRQ.INDEX)) + ''' + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_get_idcams_cmd_volumes(): + dataset = dict( + name="ANSI.CYLS.DFHLRQ", + state="initial", + exists=False, + data_set_organization="NONE", + unit=CYLINDERS, + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT, + volumes=["vserv1"] + ) + idcams_cmd_lrq = data_set_utils._build_idcams_define_cmd(local_request_queue._get_idcams_cmd_lrq(dataset)) + assert idcams_cmd_lrq == ''' + DEFINE CLUSTER (NAME(ANSI.CYLS.DFHLRQ) - + CYLINDERS(4 1) - + RECORDSIZE(2232 2400) - + INDEXED - + KEYS(40 0) - + FREESPACE(0 10) - + SHAREOPTIONS(2 3) - + REUSE - + LOG(UNDO) - + VOLUMES(vserv1)) - + DATA (NAME(ANSI.CYLS.DFHLRQ.DATA) - + CONTROLINTERVALSIZE(2560)) - + INDEX (NAME(ANSI.CYLS.DFHLRQ.INDEX)) + ''' + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_get_idcams_cmd_multiple_volumes(): + dataset = dict( + name="ANSI.CYLS.DFHLRQ", + state="initial", + exists=False, + data_set_organization="NONE", + unit=CYLINDERS, + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT, + volumes=["vserv1", "vserv2"] + ) + idcams_cmd_lrq = data_set_utils._build_idcams_define_cmd(local_request_queue._get_idcams_cmd_lrq(dataset)) + assert idcams_cmd_lrq == ''' + DEFINE CLUSTER (NAME(ANSI.CYLS.DFHLRQ) - + CYLINDERS(4 1) - + RECORDSIZE(2232 2400) - + 
INDEXED - + KEYS(40 0) - + FREESPACE(0 10) - + SHAREOPTIONS(2 3) - + REUSE - + LOG(UNDO) - + VOLUMES(vserv1 vserv2)) - + DATA (NAME(ANSI.CYLS.DFHLRQ.DATA) - + CONTROLINTERVALSIZE(2560)) - + INDEX (NAME(ANSI.CYLS.DFHLRQ.INDEX)) + ''' diff --git a/tests/unit/module_utils/test_transaction_dump.py b/tests/unit/module_utils/test_transaction_dump.py new file mode 100644 index 00000000..aeec1de8 --- /dev/null +++ b/tests/unit/module_utils/test_transaction_dump.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import absolute_import, division, print_function +__metaclass__ = type +import pytest +import sys +from ansible_collections.ibm.ibm_zos_cics.tests.unit.helpers.data_set_helper import PYTHON_LANGUAGE_FEATURES_MESSAGE +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._data_set import MEGABYTES +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.dd_statement import DatasetDefinition +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._transaction_dump import _build_seq_data_set_definition_transaction_dump +from ansible_collections.ibm.ibm_zos_cics.plugins.modules.transaction_dump import SPACE_PRIMARY_DEFAULT, SPACE_SECONDARY_DEFAULT + + +@pytest.mark.skipif(sys.version_info.major < 3, + reason=PYTHON_LANGUAGE_FEATURES_MESSAGE) +def test_transaction_dump_definition_megabytes(): + data_set = dict( + name="ANSI.M.DFHDMPA", + state="initial", + exists=False, + data_set_organization="NONE", + unit=MEGABYTES, + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT + ) + + definition = _build_seq_data_set_definition_transaction_dump(data_set) + test_definition = DatasetDefinition( + dataset_name="ANSI.M.DFHDMPA", + block_size=4096, + record_length=4092, + record_format="VB", + disposition="NEW", + normal_disposition="CATALOG", + conditional_disposition="DELETE", + primary=SPACE_PRIMARY_DEFAULT, + 
secondary=SPACE_SECONDARY_DEFAULT, + primary_unit="M", + secondary_unit="M", + type="SEQ" + ) + + assert definition.__dict__ == test_definition.__dict__ + + +@pytest.mark.skipif(sys.version_info.major < 3, + reason=PYTHON_LANGUAGE_FEATURES_MESSAGE) +def test_trace_definition_volumes(): + data_set = dict( + name="ANSI.M.DFHDMPA", + state="initial", + exists=False, + data_set_organization="NONE", + unit=MEGABYTES, + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT, + volumes=["vserv1"] + ) + + definition = _build_seq_data_set_definition_transaction_dump(data_set) + test_definition = DatasetDefinition( + dataset_name="ANSI.M.DFHDMPA", + block_size=4096, + record_length=4092, + record_format="VB", + disposition="NEW", + normal_disposition="CATALOG", + conditional_disposition="DELETE", + primary=SPACE_PRIMARY_DEFAULT, + secondary=SPACE_SECONDARY_DEFAULT, + primary_unit="M", + secondary_unit="M", + type="SEQ", + volumes=["vserv1"] + ) + + assert definition.__dict__ == test_definition.__dict__ diff --git a/tests/unit/modules/test_aux_temp_storage.py b/tests/unit/modules/test_aux_temp_storage.py new file mode 100644 index 00000000..0c0b4ebf --- /dev/null +++ b/tests/unit/modules/test_aux_temp_storage.py @@ -0,0 +1,367 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 
2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) +from __future__ import absolute_import, division, print_function +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils import _data_set_utils as data_set_utils, _icetool as icetool +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._response import _execution +from ansible_collections.ibm.ibm_zos_cics.tests.unit.helpers.data_set_helper import ( + PYTHON_LANGUAGE_FEATURES_MESSAGE, + ICETOOL_name, + ICETOOL_stderr, + ICETOOL_stdout, + IDCAMS_delete_run_name, + IDCAMS_delete, + IDCAMS_create_run_name, + LISTDS_data_set_doesnt_exist, + LISTDS_data_set, + LISTDS_run_name, + set_module_args +) +from ansible_collections.ibm.ibm_zos_cics.plugins.modules import aux_temp_storage +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.zos_mvs_raw import MVSCmdResponse +import pytest +import sys + +try: + from unittest.mock import MagicMock +except ImportError: + from mock import MagicMock + + +__metaclass__ = type + +NAME = "TEST.REGIONS.DFHTEMP" + +default_arg_parms = { + "space_primary": 5, + "space_secondary": 3, + "space_type": "M", + "region_data_sets": {"dfhtemp": {"dsn": NAME}}, + "state": "initial", +} + + +def initialise_module(**kwargs): + initial_args = default_arg_parms + initial_args.update(kwargs) + set_module_args(initial_args) + temp_module = aux_temp_storage.AnsibleAuxiliaryTempModule() + temp_module._module.fail_json = MagicMock(return_value=None) + temp_module._module.exit_json = MagicMock(return_value=None) + return temp_module + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_create_an_intial_temp_ds(): + temp_module = initialise_module() + + data_set_utils._execute_idcams = MagicMock( + return_value=MVSCmdResponse(0, NAME, "") + ) + data_set_utils._execute_listds = MagicMock( + side_effect=[ + MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(NAME), ""), + MVSCmdResponse(0, 
LISTDS_data_set(NAME, "VSAM"), ""), + ] + ) + + temp_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr="", + ), + _execution( + name=IDCAMS_create_run_name(1, NAME), + rc=0, + stdout=NAME, + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr="", + ), + ], + start_state=dict( + exists=False, + data_set_organization="NONE" + ), + end_state=dict( + exists=True, + data_set_organization="VSAM" + ), + changed=True, + failed=False, + msg="", + ) + assert temp_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_delete_an_existing_aux_temp_storage_ds(): + temp_module = initialise_module(state="absent") + + data_set_utils._execute_idcams = MagicMock( + return_value=MVSCmdResponse(0, IDCAMS_delete(NAME), "") + ) + data_set_utils._execute_listds = MagicMock( + side_effect=[ + MVSCmdResponse(0, LISTDS_data_set(NAME, "VSAM"), ""), + MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(NAME), ""), + ] + ) + + temp_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr="", + ), + _execution( + name=IDCAMS_delete_run_name(1, NAME), + rc=0, + stdout=IDCAMS_delete(NAME), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr="", + ), + ], + start_state=dict( + exists=True, + data_set_organization="VSAM" + ), + end_state=dict( + exists=False, + data_set_organization="NONE" + ), + changed=True, + failed=False, + msg="", + ) + assert temp_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_delete_an_existing_temp_and_replace(): + temp_module = initialise_module() + + 
data_set_utils._execute_listds = MagicMock( + side_effect=[ + MVSCmdResponse(0, LISTDS_data_set(NAME, "VSAM"), ""), + MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(NAME), ""), + MVSCmdResponse(0, LISTDS_data_set(NAME, "VSAM"), ""), + ] + ) + data_set_utils._execute_idcams = MagicMock( + side_effect=[ + MVSCmdResponse(0, IDCAMS_delete(NAME), ""), + MVSCmdResponse(0, NAME, ""), + ] + ) + icetool._execute_icetool = MagicMock( + return_value=( + MVSCmdResponse( + rc=0, + stdout=ICETOOL_stdout(52), + stderr=ICETOOL_stderr() + ) + ) + ) + + temp_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr="", + ), + _execution( + name=ICETOOL_name(1), + rc=0, + stdout=ICETOOL_stdout(52), + stderr=ICETOOL_stderr() + ), + _execution( + name=IDCAMS_delete_run_name(1, NAME), + rc=0, + stdout=IDCAMS_delete(NAME), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr="", + ), + _execution( + name=IDCAMS_create_run_name(1, NAME), + rc=0, + stdout=NAME, + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr="", + ), + ], + start_state=dict( + exists=True, + data_set_organization="VSAM" + ), + end_state=dict( + exists=True, + data_set_organization="VSAM" + ), + changed=True, + failed=False, + msg="", + ) + assert temp_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_remove_non_existent_temp(): + temp_module = initialise_module(state="absent") + + data_set_utils._execute_listds = MagicMock( + return_value=MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(NAME), "") + ) + + temp_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr="", + ), + _execution( + 
name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr="", + ), + ], + start_state=dict( + exists=False, + data_set_organization="NONE" + ), + end_state=dict( + exists=False, + data_set_organization="NONE" + ), + changed=False, + failed=False, + msg="", + ) + assert temp_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_warm_on_non_existent_temp(): + temp_module = initialise_module(state="warm") + + data_set_utils._execute_listds = MagicMock( + return_value=MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(NAME), "") + ) + + temp_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr=""), + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr=""), + ], + start_state=dict( + exists=False, + data_set_organization="NONE" + ), + end_state=dict( + exists=False, + data_set_organization="NONE" + ), + changed=False, + failed=True, + msg="Data set {0} does not exist.".format(NAME), + ) + assert temp_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_warm_success_temp(): + temp_module = initialise_module(state="warm") + + data_set_utils._execute_listds = MagicMock( + return_value=MVSCmdResponse( + 0, + LISTDS_data_set(NAME, "VSAM"), + "" + ) + ) + + temp_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr="" + ), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr="" + ), + ], + start_state=dict( + exists=True, + data_set_organization="VSAM" + ), + end_state=dict( + exists=True, + data_set_organization="VSAM" + ), + changed=False, + failed=False, + msg="", + ) + 
assert temp_module.get_result() == expected_result diff --git a/tests/unit/modules/test_aux_trace.py b/tests/unit/modules/test_aux_trace.py new file mode 100644 index 00000000..a5491a95 --- /dev/null +++ b/tests/unit/modules/test_aux_trace.py @@ -0,0 +1,344 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) +from __future__ import absolute_import, division, print_function +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils import _data_set_utils as data_set_utils +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._response import _execution +from ansible_collections.ibm.ibm_zos_cics.tests.unit.helpers.data_set_helper import ( + PYTHON_LANGUAGE_FEATURES_MESSAGE, + IDCAMS_delete_run_name, + IDCAMS_delete, + IEFBR14_create_stderr, + LISTDS_data_set_doesnt_exist, + LISTDS_data_set, + LISTDS_run_name, + set_module_args +) +from ansible_collections.ibm.ibm_zos_cics.plugins.modules import aux_trace +import pytest +import sys + +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.zos_mvs_raw import MVSCmdResponse + +try: + from unittest.mock import MagicMock +except ImportError: + from mock import MagicMock + + +__metaclass__ = type + +NAMEA = "TEST.REGIONS.DFHAUXT" +NAMEB = "TEST.REGIONS.DFHBUXT" + +default_arg_parms = { + "space_primary": 20, + "space_secondary": 3, + "space_type": "M", + "region_data_sets": { + "dfhauxt": { + "dsn": NAMEA + }, + "dfhbuxt": { + "dsn": NAMEB + } + }, + "state": "initial", + "destination": "A" +} + + +def initialise_module(**kwargs): + initial_args = default_arg_parms + initial_args.update(kwargs) + set_module_args(initial_args) + trace_module = aux_trace.AnsibleAuxiliaryTraceModule() + trace_module._module.fail_json = MagicMock(return_value=None) + trace_module._module.exit_json = MagicMock(return_value=None) + return trace_module + + +@pytest.mark.skipif( + sys.version_info.major < 3, 
reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_create_an_intial_aux_trace(): + trace_module = initialise_module() + + data_set_utils._execute_listds = MagicMock( + side_effect=[ + MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(NAMEA), ""), + MVSCmdResponse(0, LISTDS_data_set(NAMEA, "PS"), ""), + ] + ) + data_set_utils._execute_iefbr14 = MagicMock( + return_value=MVSCmdResponse( + rc=0, stdout="", stderr=IEFBR14_create_stderr(NAMEA, "DFHAUXT") + ) + ) + + trace_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAMEA), + stderr="", + ), + _execution( + name="IEFBR14 - dfhauxt - Run 1", + rc=0, + stdout="", + stderr=IEFBR14_create_stderr(NAMEA, "DFHAUXT") + ), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAMEA, "PS"), + stderr="", + ), + ], + start_state=dict( + exists=False, + data_set_organization="NONE" + ), + end_state=dict( + exists=True, + data_set_organization="Sequential" + ), + changed=True, + failed=False, + msg="", + ) + assert trace_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_delete_an_existing_aux_trace(): + trace_module = initialise_module(state="absent") + + data_set_utils._execute_idcams = MagicMock( + return_value=MVSCmdResponse(0, IDCAMS_delete(NAMEA), "") + ) + data_set_utils._execute_listds = MagicMock( + side_effect=[ + MVSCmdResponse(0, LISTDS_data_set(NAMEA, "PS"), ""), + MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(NAMEA), ""), + ] + ) + + trace_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAMEA, "PS"), + stderr="", + ), + _execution( + name=IDCAMS_delete_run_name(1, NAMEA), + rc=0, + stdout=IDCAMS_delete(NAMEA), + stderr="" + ), + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAMEA), + 
stderr="", + ), + ], + start_state=dict( + exists=True, + data_set_organization="Sequential" + ), + end_state=dict( + exists=False, + data_set_organization="NONE" + ), + changed=True, + failed=False, + msg="", + ) + assert trace_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_remove_non_existent_aux_trace(): + trace_module = initialise_module(state="absent") + + data_set_utils._execute_listds = MagicMock( + return_value=MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(NAMEA), "") + ) + + trace_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAMEA), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAMEA), + stderr="", + ), + ], + start_state=dict( + exists=False, + data_set_organization="NONE" + ), + end_state=dict( + exists=False, + data_set_organization="NONE" + ), + changed=False, + failed=False, + msg="", + ) + assert trace_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_create_an_intial_destination_b_aux_trace(): + trace_module = initialise_module(destination="B") + + data_set_utils._execute_listds = MagicMock( + side_effect=[ + MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(NAMEB), ""), + MVSCmdResponse(0, LISTDS_data_set(NAMEB, "PS"), ""), + ] + ) + data_set_utils._execute_iefbr14 = MagicMock( + return_value=MVSCmdResponse( + rc=0, stdout="", stderr=IEFBR14_create_stderr(NAMEB, "DFHBUXT") + ) + ) + + trace_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAMEB), + stderr="", + ), + _execution( + name="IEFBR14 - dfhbuxt - Run 1", + rc=0, + stdout="", + stderr=IEFBR14_create_stderr(NAMEB, "DFHBUXT")), + _execution( + 
name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAMEB, "PS"), + stderr="", + ), + ], + start_state=dict( + exists=False, + data_set_organization="NONE" + ), + end_state=dict( + exists=True, + data_set_organization="Sequential" + ), + changed=True, + failed=False, + msg="", + ) + assert trace_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_warm_on_non_existent_aux(): + aux_module = initialise_module(state="warm") + + data_set_utils._execute_listds = MagicMock( + return_value=MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(NAMEA), "") + ) + + aux_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAMEA), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAMEA), + stderr="", + ), + ], + start_state=dict( + exists=False, + data_set_organization="NONE" + ), + end_state=dict( + exists=False, + data_set_organization="NONE" + ), + failed=True, + changed=False, + msg="Data set {0} does not exist.".format(NAMEA), + ) + assert aux_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_warm_success_aux(): + aux_module = initialise_module(state="warm") + + data_set_utils._execute_listds = MagicMock( + return_value=MVSCmdResponse(0, LISTDS_data_set(NAMEA, "PS"), "") + ) + + aux_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAMEA, "PS"), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAMEA, "PS"), + stderr="", + ), + ], + start_state=dict( + exists=True, + data_set_organization="Sequential" + ), + end_state=dict( + exists=True, + data_set_organization="Sequential" + ), + failed=False, + changed=False, + msg="", + 
) + assert aux_module.get_result() == expected_result diff --git a/tests/unit/modules/test_cmci_action.py b/tests/unit/modules/test_cmci_action.py index f95b71dc..4480f370 100644 --- a/tests/unit/modules/test_cmci_action.py +++ b/tests/unit/modules/test_cmci_action.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# (c) Copyright IBM Corp. 2020,2021 +# (c) Copyright IBM Corp. 2020,2023 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) from __future__ import absolute_import, division, print_function diff --git a/tests/unit/modules/test_cmci_create.py b/tests/unit/modules/test_cmci_create.py index dbf864e7..12c2dfbb 100644 --- a/tests/unit/modules/test_cmci_create.py +++ b/tests/unit/modules/test_cmci_create.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# (c) Copyright IBM Corp. 2020,2021 +# (c) Copyright IBM Corp. 2020,2023 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) from __future__ import absolute_import, division, print_function from collections import OrderedDict diff --git a/tests/unit/modules/test_cmci_delete.py b/tests/unit/modules/test_cmci_delete.py index 3fc49272..8ae3e83f 100644 --- a/tests/unit/modules/test_cmci_delete.py +++ b/tests/unit/modules/test_cmci_delete.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# (c) Copyright IBM Corp. 2020,2021 +# (c) Copyright IBM Corp. 2020,2023 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) from __future__ import absolute_import, division, print_function diff --git a/tests/unit/modules/test_cmci_filters.py b/tests/unit/modules/test_cmci_filters.py index 51da7887..515320d1 100644 --- a/tests/unit/modules/test_cmci_filters.py +++ b/tests/unit/modules/test_cmci_filters.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# (c) Copyright IBM Corp. 2020,2021 +# (c) Copyright IBM Corp. 
2020,2023 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) from __future__ import absolute_import, division, print_function diff --git a/tests/unit/modules/test_cmci_get.py b/tests/unit/modules/test_cmci_get.py index 731c493f..08b5b525 100644 --- a/tests/unit/modules/test_cmci_get.py +++ b/tests/unit/modules/test_cmci_get.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# (c) Copyright IBM Corp. 2020 +# (c) Copyright IBM Corp. 2020,2024 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) from __future__ import absolute_import, division, print_function diff --git a/tests/unit/modules/test_cmci_update.py b/tests/unit/modules/test_cmci_update.py index 83700620..762dcac9 100644 --- a/tests/unit/modules/test_cmci_update.py +++ b/tests/unit/modules/test_cmci_update.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# (c) Copyright IBM Corp. 2020,2021 +# (c) Copyright IBM Corp. 2020,2023 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) from __future__ import absolute_import, division, print_function diff --git a/tests/unit/modules/test_csd.py b/tests/unit/modules/test_csd.py new file mode 100644 index 00000000..4b9e8f49 --- /dev/null +++ b/tests/unit/modules/test_csd.py @@ -0,0 +1,513 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 
2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) +from __future__ import absolute_import, division, print_function +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils import _data_set_utils as data_set_utils +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils import _csd as csd_utils +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils import _icetool as icetool +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._response import _execution +from ansible_collections.ibm.ibm_zos_cics.tests.unit.helpers.data_set_helper import ( + PYTHON_LANGUAGE_FEATURES_MESSAGE, + CSDUP_name, + CSDUP_stderr, + CSDUP_initialize_stdout, + ICETOOL_name, + ICETOOL_stderr, + ICETOOL_stdout, + IDCAMS_delete_run_name, + IDCAMS_delete, + IDCAMS_create_run_name, + LISTDS_data_set_doesnt_exist, + LISTDS_data_set, + LISTDS_run_name, + set_module_args +) +from ansible_collections.ibm.ibm_zos_cics.plugins.modules import csd +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.dd_statement import StdinDefinition +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.zos_mvs_raw import MVSCmdResponse +import pytest +import sys + + +try: + from unittest.mock import MagicMock +except ImportError: + from mock import MagicMock + + +__metaclass__ = type + + +NAME = "TEST.REGIONS.CSD" + +default_arg_parms = { + "space_primary": 5, + "space_secondary": 3, + "space_type": "M", + "region_data_sets": { + "dfhcsd": { + "dsn": NAME + } + }, + "cics_data_sets": { + "sdfhload": "TEST.CICS.INSTALL.SDFHLOAD" + }, + "state": "initial", +} + + +def setUp(): + StdinDefinition.__init__ = MagicMock(return_value=None) + + +def initialise_module(**kwargs): + initial_args = default_arg_parms + initial_args.update(kwargs) + set_module_args(initial_args) + csd_module = csd.AnsibleCSDModule() + # Mock Ansible module fail and exits, this prevents sys.exit being called but retains an accurate results + 
csd_module._module.fail_json = MagicMock(return_value=None) + csd_module._module.exit_json = MagicMock(return_value=None) + return csd_module + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_create_an_intial_csd(): + setUp() + csd_module = initialise_module() + + data_set_utils._execute_idcams = MagicMock( + return_value=MVSCmdResponse(0, NAME, "") + ) + data_set_utils._execute_listds = MagicMock( + side_effect=[ + MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(NAME), ""), + MVSCmdResponse(0, LISTDS_data_set(NAME, "VSAM"), ""), + ] + ) + csd_utils._execute_dfhcsdup = MagicMock( + side_effect=[ + MVSCmdResponse(rc=0, stdout=CSDUP_initialize_stdout(NAME), stderr=CSDUP_stderr(NAME)) + ] + ) + + csd_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr="", + ), + _execution( + name=IDCAMS_create_run_name(1, NAME), + rc=0, + stdout=NAME, + stderr="", + ), + _execution( + name=CSDUP_name(), + rc=0, + stdout=CSDUP_initialize_stdout(NAME), + stderr=CSDUP_stderr(NAME) + ), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr="", + ) + ], + start_state=dict( + exists=False, + data_set_organization="NONE" + ), + end_state=dict( + exists=True, + data_set_organization="VSAM" + ), + changed=True, + failed=False, + msg="", + ) + assert csd_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_delete_an_existing_csd(): + setUp() + csd_module = initialise_module(state="absent") + + data_set_utils._execute_idcams = MagicMock( + return_value=MVSCmdResponse(0, IDCAMS_delete(NAME), "") + ) + data_set_utils._execute_listds = MagicMock( + side_effect=[ + MVSCmdResponse(0, LISTDS_data_set(NAME, "VSAM"), ""), + MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(NAME), ""), + ] + ) + + 
csd_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr="", + ), + _execution( + name=IDCAMS_delete_run_name(1, NAME), + rc=0, + stdout=IDCAMS_delete(NAME), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr="", + ), + ], + start_state=dict( + exists=True, + data_set_organization="VSAM" + ), + end_state=dict( + exists=False, + data_set_organization="NONE" + ), + changed=True, + failed=False, + msg="", + ) + assert csd_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_do_nothing_to_an_existing_csd(): + setUp() + csd_module = initialise_module() + + data_set_utils._execute_listds = MagicMock( + side_effect=[ + MVSCmdResponse(0, LISTDS_data_set(NAME, "VSAM"), ""), + MVSCmdResponse(0, LISTDS_data_set(NAME, "VSAM"), "") + ] + ) + icetool._execute_icetool = MagicMock( + return_value=MVSCmdResponse( + rc=0, + stdout=ICETOOL_stdout(0), + stderr=ICETOOL_stderr() + ) + ) + csd_utils._execute_dfhcsdup = MagicMock( + side_effect=[ + MVSCmdResponse(rc=0, stdout=CSDUP_initialize_stdout(NAME), stderr=CSDUP_stderr(NAME)) + ] + ) + + csd_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr="", + ), + _execution( + name=ICETOOL_name(1), + rc=0, + stdout=ICETOOL_stdout(0), + stderr=ICETOOL_stderr() + ), + _execution( + name=CSDUP_name(), + rc=0, + stdout=CSDUP_initialize_stdout(NAME), + stderr=CSDUP_stderr(NAME) + ), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr="", + ), + ], + start_state=dict( + exists=True, + data_set_organization="VSAM" + ), + end_state=dict( + exists=True, + data_set_organization="VSAM" + ), + changed=False, + failed=False, + msg="", + ) + assert 
csd_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_remove_non_existent_csd(): + setUp() + csd_module = initialise_module(state="absent") + + data_set_utils._execute_listds = MagicMock( + return_value=MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(NAME), "") + ) + + csd_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr="", + ), + ], + start_state=dict( + exists=False, + data_set_organization="NONE" + ), + end_state=dict( + exists=False, + data_set_organization="NONE" + ), + changed=False, + failed=False, + msg="", + ) + assert csd_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_warm_start_a_existing_csd(): + setUp() + csd_module = initialise_module(state="warm") + + data_set_utils._execute_listds = MagicMock( + return_value=MVSCmdResponse( + 0, + LISTDS_data_set(NAME, "VSAM"), + "" + ) + ) + icetool._execute_icetool = MagicMock( + return_value=MVSCmdResponse( + rc=0, + stdout=ICETOOL_stdout(52), + stderr=ICETOOL_stderr() + ) + ) + + csd_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr=""), + _execution( + name=ICETOOL_name(1), + rc=0, + stdout=ICETOOL_stdout(52), + stderr=ICETOOL_stderr(), + ), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr=""), + ], + start_state=dict( + exists=True, + data_set_organization="VSAM" + ), + end_state=dict( + exists=True, + data_set_organization="VSAM" + ), + changed=False, + failed=False, + msg="", + ) + assert csd_module.get_result() == expected_result + + 
+@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_error_warm_start_a_unused_csd(): + setUp() + csd_module = initialise_module(state="warm") + + data_set_utils._execute_listds = MagicMock( + return_value=MVSCmdResponse( + 0, + LISTDS_data_set(NAME, "VSAM"), + "" + ) + ) + icetool._execute_icetool = MagicMock( + return_value=MVSCmdResponse( + rc=0, + stdout=ICETOOL_stdout(0), + stderr=ICETOOL_stderr() + ) + ) + csd_module.main() + + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr=""), + _execution( + name=ICETOOL_name(1), + rc=0, + stdout=ICETOOL_stdout(0), + stderr=ICETOOL_stderr()), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr=""), + ], + start_state=dict( + exists=True, + data_set_organization="VSAM" + ), + end_state=dict( + exists=True, + data_set_organization="VSAM" + ), + changed=False, + failed=True, + msg="Data set {0} is empty.".format(NAME), + ) + assert csd_module.get_result() == expected_result + + +@pytest.mark.skipif(sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE) +def test_error_warm_start_a_non_existent_csd(): + setUp() + csd_module = initialise_module(state="warm") + + data_set_utils._execute_listds = MagicMock(return_value=MVSCmdResponse( + 8, LISTDS_data_set_doesnt_exist(NAME), "")) + + csd_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr=""), + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr=""), + ], + start_state=dict( + exists=False, + data_set_organization="NONE" + ), + end_state=dict( + exists=False, + data_set_organization="NONE" + ), + failed=True, + changed=False, + msg="Data set {0} does not exist.".format(NAME), + ) + assert csd_module.get_result() == 
expected_result + + +@pytest.mark.skipif(sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE) +def test_bad_response_from_csdup(): + setUp() + csd_module = initialise_module() + + data_set_utils._execute_idcams = MagicMock( + return_value=MVSCmdResponse(0, NAME, "") + ) + data_set_utils._execute_listds = MagicMock( + side_effect=[ + MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(NAME), ""), + MVSCmdResponse(0, LISTDS_data_set(NAME, "VSAM"), ""), + ] + ) + csd_utils._execute_dfhcsdup = MagicMock( + return_value=MVSCmdResponse(rc=99, stdout=CSDUP_initialize_stdout(NAME), stderr=CSDUP_stderr(NAME)) + ) + + csd_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr="" + ), + _execution( + name=IDCAMS_create_run_name(1, NAME), + rc=0, + stdout=NAME, + stderr="", + ), + _execution( + name=CSDUP_name(), + rc=99, + stdout=CSDUP_initialize_stdout(NAME), + stderr=CSDUP_stderr(NAME) + ), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr="", + ), + ], + start_state=dict( + exists=False, + data_set_organization="NONE" + ), + end_state=dict( + exists=True, + data_set_organization="VSAM" + ), + changed=True, + failed=True, + msg="DFHCSDUP failed with RC 99", + ) + assert csd_module.get_result() == expected_result diff --git a/tests/unit/modules/test_global_catalog.py b/tests/unit/modules/test_global_catalog.py new file mode 100644 index 00000000..0c9985c0 --- /dev/null +++ b/tests/unit/modules/test_global_catalog.py @@ -0,0 +1,661 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 
def initialise_module(**kwargs):
    """Build an AnsibleGlobalCatalogModule for a test, defaults overridden by kwargs.

    Copy the shared ``default_arg_parms`` before applying overrides; updating
    the module-level dict in place would leak per-test state (e.g.
    ``state="absent"``) into every subsequently constructed module.
    """
    initial_args = dict(default_arg_parms)
    initial_args.update(kwargs)
    set_module_args(initial_args)
    gcd_module = global_catalog.AnsibleGlobalCatalogModule()
    # Mock the Ansible module fail/exit hooks; this prevents sys.exit being
    # called while retaining an accurate result payload for assertions.
    gcd_module._module.fail_json = MagicMock(return_value=None)
    gcd_module._module.exit_json = MagicMock(return_value=None)
    return gcd_module
IDCAMS_delete(NAME), ""), + ) + + data_set_utils._execute_listds = MagicMock( + side_effect=[ + MVSCmdResponse(0, LISTDS_data_set(NAME, "VSAM"), ""), + MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(NAME), ""), + ] + ) + icetool._execute_icetool = MagicMock( + return_value=MVSCmdResponse( + rc=0, + stdout=ICETOOL_stdout(0), + stderr=ICETOOL_stderr() + ) + ) + global_catalog_utils._execute_dfhrmutl = MagicMock( + return_value=MVSCmdResponse(rc=0, stdout=RMUTL_stdout("AUTOINIT", "UNKNOWN"), stderr=RMUTL_stderr(NAME)) + ) + + gcd_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr="", + ), + _execution( + name=RMUTL_get_run_name(1), + rc=0, + stdout=RMUTL_stdout("AUTOINIT", "UNKNOWN"), + stderr=RMUTL_stderr(NAME) + ), + _execution( + name=IDCAMS_delete_run_name(1, NAME), + rc=0, + stdout=IDCAMS_delete(NAME), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr="", + ), + ], + start_state=dict( + exists=True, + data_set_organization="VSAM", + next_start="UNKNOWN", + autostart_override="AUTOINIT" + ), + end_state=dict( + exists=False, + data_set_organization="NONE", + next_start="", + autostart_override="" + ), + changed=True, + failed=False, + msg="", + ) + assert gcd_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_remove_non_existent_global_catalog(): + gcd_module = initialise_module(state="absent") + + data_set_utils._execute_listds = MagicMock( + return_value=MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(NAME), "") + ) + gcd_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr="", + ), + 
], + start_state=dict( + exists=False, + data_set_organization="NONE", + next_start="", + autostart_override="" + ), + end_state=dict( + exists=False, + data_set_organization="NONE", + next_start="", + autostart_override="" + ), + changed=False, + failed=False, + msg="", + ) + assert gcd_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_warm_start_a_global_catalog(): + gcd_module = initialise_module(state="warm") + + data_set_utils._execute_listds = MagicMock( + return_value=MVSCmdResponse( + 0, + LISTDS_data_set(NAME, "VSAM"), + "" + ) + ) + global_catalog_utils._execute_dfhrmutl = MagicMock( + return_value=MVSCmdResponse(rc=0, stdout=RMUTL_stdout("AUTOASIS", "UNKNOWN"), stderr=RMUTL_stderr(NAME)) + ) + icetool._execute_icetool = MagicMock( + return_value=MVSCmdResponse( + rc=0, + stdout=ICETOOL_stdout(52), + stderr=ICETOOL_stderr() + ) + ) + + gcd_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr=""), + _execution( + name=RMUTL_get_run_name(1), + rc=0, + stdout=RMUTL_stdout("AUTOASIS", "UNKNOWN"), + stderr=RMUTL_stderr(NAME) + ), + _execution( + name=ICETOOL_name(1), + rc=0, + stdout=ICETOOL_stdout(52), + stderr=ICETOOL_stderr() + ), + _execution( + name=RMUTL_update_run_name(1), + rc=0, + stdout=RMUTL_stdout("AUTOASIS", "UNKNOWN"), + stderr=RMUTL_stderr(NAME) + ), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr=""), + _execution( + name=RMUTL_get_run_name(1), + rc=0, + stdout=RMUTL_stdout("AUTOASIS", "UNKNOWN"), + stderr=RMUTL_stderr(NAME) + ), + ], + start_state=dict( + exists=True, + data_set_organization="VSAM", + next_start="UNKNOWN", + autostart_override="AUTOASIS" + ), + end_state=dict( + exists=True, + data_set_organization="VSAM", + next_start="UNKNOWN", + autostart_override="AUTOASIS" + ), + changed=True, + 
failed=False, + msg="", + ) + assert gcd_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_error_warm_start_a_unused_global_catalog(): + gcd_module = initialise_module(state="warm") + + data_set_utils._execute_listds = MagicMock( + return_value=MVSCmdResponse( + 0, + LISTDS_data_set(NAME, "VSAM"), + "" + ) + ) + global_catalog_utils._execute_dfhrmutl = MagicMock( + return_value=MVSCmdResponse(rc=0, stdout=RMUTL_stdout("AUTOINIT", "UNKNOWN"), stderr=RMUTL_stderr(NAME)) + ) + icetool._execute_icetool = MagicMock( + return_value=MVSCmdResponse( + rc=0, + stdout=ICETOOL_stdout(0), + stderr=ICETOOL_stderr() + ) + ) + gcd_module.main() + + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr="" + ), + _execution( + name=RMUTL_get_run_name(1), + rc=0, + stdout=RMUTL_stdout("AUTOINIT", "UNKNOWN"), + stderr=RMUTL_stderr(NAME) + ), + _execution( + name=ICETOOL_name(1), + rc=0, + stdout=ICETOOL_stdout(0), + stderr=ICETOOL_stderr()), + _execution( + name=RMUTL_update_run_name(1), + rc=0, + stdout=RMUTL_stdout("AUTOINIT", "UNKNOWN"), + stderr=RMUTL_stderr(NAME) + ), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr="" + ), + _execution( + name=RMUTL_get_run_name(1), + rc=0, + stdout=RMUTL_stdout("AUTOINIT", "UNKNOWN"), + stderr=RMUTL_stderr(NAME) + ), + ], + start_state=dict( + exists=True, + data_set_organization="VSAM", + next_start="UNKNOWN", + autostart_override="AUTOINIT" + ), + end_state=dict( + exists=True, + data_set_organization="VSAM", + next_start="UNKNOWN", + autostart_override="AUTOINIT" + ), + changed=True, + failed=True, + msg="Unused catalog. 
The catalog must be used by CICS before doing a warm start.", + ) + assert gcd_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_error_warm_start_a_non_existent_global_catalog(): + gcd_module = initialise_module(state="warm") + + data_set_utils._execute_listds = MagicMock( + return_value=MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(NAME), "") + ) + global_catalog_utils._execute_dfhrmutl = MagicMock( + return_value=MVSCmdResponse(rc=0, stdout=RMUTL_stdout("AUTOINIT", "UNKNOWN"), stderr=RMUTL_stderr(NAME)) + ) + + gcd_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr="" + ), + _execution( + name=RMUTL_update_run_name(1), + rc=0, + stdout=RMUTL_stdout("AUTOINIT", "UNKNOWN"), + stderr=RMUTL_stderr(NAME) + ), + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr="" + ), + ], + start_state=dict( + exists=False, + data_set_organization="NONE", + next_start="", + autostart_override="" + ), + end_state=dict( + exists=False, + data_set_organization="NONE", + next_start="", + autostart_override="" + ), + failed=True, + changed=True, + msg="Data set {0} does not exist.".format(NAME), + ) + assert gcd_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def tests_cold_start_non_existent_catalog(): + gcd_module = initialise_module(state="cold") + + data_set_utils._execute_listds = MagicMock( + return_value=MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(NAME), "") + ) + global_catalog_utils._execute_dfhrmutl = MagicMock( + return_value=MVSCmdResponse(rc=0, stdout=RMUTL_stdout("AUTOCOLD", "UNKNOWN"), stderr=RMUTL_stderr(NAME)) + ) + + gcd_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=8, + 
stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr="" + ), + _execution( + name=RMUTL_update_run_name(1), + rc=0, + stdout=RMUTL_stdout("AUTOCOLD", "UNKNOWN"), + stderr=RMUTL_stderr(NAME) + ), + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr="" + ), + ], + start_state=dict( + exists=False, + data_set_organization="NONE", + next_start="", + autostart_override="" + ), + end_state=dict( + exists=False, + data_set_organization="NONE", + next_start="", + autostart_override="" + ), + failed=True, + changed=True, + msg="Data set {0} does not exist.".format(NAME), + ) + assert gcd_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_cold_start_unused_catalog(): + gcd_module = initialise_module(state="cold") + + data_set_utils._execute_listds = MagicMock( + return_value=MVSCmdResponse(0, LISTDS_data_set(NAME, "VSAM"), "") + ) + global_catalog_utils._execute_dfhrmutl = MagicMock( + return_value=MVSCmdResponse(rc=0, stdout=RMUTL_stdout("AUTOINIT", "UNKNOWN"), stderr=RMUTL_stderr(NAME)) + ) + + gcd_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr="", + ), + _execution( + name=RMUTL_get_run_name(1), + rc=0, + stdout=RMUTL_stdout("AUTOINIT", "UNKNOWN"), + stderr=RMUTL_stderr(NAME) + ), + _execution( + name=RMUTL_update_run_name(1), + rc=0, + stdout=RMUTL_stdout("AUTOINIT", "UNKNOWN"), + stderr=RMUTL_stderr(NAME) + ), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr="", + ), + _execution( + name=RMUTL_get_run_name(1), + rc=0, + stdout=RMUTL_stdout("AUTOINIT", "UNKNOWN"), + stderr=RMUTL_stderr(NAME) + ), + ], + start_state=dict( + exists=True, + data_set_organization="VSAM", + next_start="UNKNOWN", + autostart_override="AUTOINIT" + ), + end_state=dict( + exists=True, + 
data_set_organization="VSAM", + next_start="UNKNOWN", + autostart_override="AUTOINIT" + ), + changed=True, + failed=True, + msg="Unused catalog. The catalog must be used by CICS before doing a cold start.", + ) + assert gcd_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_cold_start_global_catalog(): + gcd_module = initialise_module(state="cold") + + data_set_utils._execute_listds = MagicMock( + return_value=MVSCmdResponse(0, LISTDS_data_set(NAME, "VSAM"), "") + ) + global_catalog_utils._execute_dfhrmutl = MagicMock( + return_value=MVSCmdResponse(rc=0, stdout=RMUTL_stdout("AUTOCOLD", "UNKNOWN"), stderr=RMUTL_stderr(NAME)) + ) + + gcd_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr="" + ), + _execution( + name=RMUTL_get_run_name(1), + rc=0, + stdout=RMUTL_stdout("AUTOCOLD", "UNKNOWN"), + stderr=RMUTL_stderr(NAME) + ), + _execution( + name=RMUTL_update_run_name(1), + rc=0, + stdout=RMUTL_stdout("AUTOCOLD", "UNKNOWN"), + stderr=RMUTL_stderr(NAME) + ), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr="" + ), + _execution( + name=RMUTL_get_run_name(1), + rc=0, + stdout=RMUTL_stdout("AUTOCOLD", "UNKNOWN"), + stderr=RMUTL_stderr(NAME) + ), + ], + start_state=dict( + exists=True, + data_set_organization="VSAM", + next_start="UNKNOWN", + autostart_override="AUTOCOLD" + ), + end_state=dict( + exists=True, + data_set_organization="VSAM", + next_start="UNKNOWN", + autostart_override="AUTOCOLD" + ), + changed=True, + failed=False, + msg="", + ) + assert gcd_module.get_result() == expected_result diff --git a/tests/unit/modules/test_local_catalog.py b/tests/unit/modules/test_local_catalog.py new file mode 100644 index 00000000..51d38bc3 --- /dev/null +++ b/tests/unit/modules/test_local_catalog.py @@ -0,0 +1,537 @@ +# -*- coding: 
def initialise_module(**kwargs):
    """Build an AnsibleLocalCatalogModule for a test, defaults overridden by kwargs.

    Copy the shared ``default_arg_parms`` before applying overrides; updating
    the module-level dict in place would leak per-test state (e.g.
    ``state="absent"``) into every subsequently constructed module.
    """
    initial_args = dict(default_arg_parms)
    initial_args.update(kwargs)
    set_module_args(initial_args)
    lcd_module = local_catalog.AnsibleLocalCatalogModule()
    # Mock the Ansible module fail/exit hooks; this prevents sys.exit being
    # called while retaining an accurate result payload for assertions.
    lcd_module._module.fail_json = MagicMock(return_value=None)
    lcd_module._module.exit_json = MagicMock(return_value=None)
    return lcd_module
executions=[ + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr="", + ), + _execution( + name=IDCAMS_delete_run_name(1, NAME), + rc=0, + stdout=IDCAMS_delete(NAME), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr="", + ), + ], + start_state=dict( + exists=True, + data_set_organization="VSAM" + ), + end_state=dict( + exists=False, + data_set_organization="NONE" + ), + changed=True, + failed=False, + msg="", + ) + assert lcd_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_delete_an_existing_local_catalog_and_replace(): + lcd_module = initialise_module() + + data_set_utils._execute_idcams = MagicMock( + side_effect=[ + MVSCmdResponse(0, IDCAMS_delete(NAME), ""), + MVSCmdResponse(0, NAME, ""), + ] + ) + data_set_utils._execute_listds = MagicMock( + side_effect=[ + MVSCmdResponse(0, LISTDS_data_set(NAME, "VSAM"), ""), + MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(NAME), ""), + MVSCmdResponse(0, LISTDS_data_set(NAME, "VSAM"), ""), + ] + ) + icetool._execute_icetool = MagicMock( + return_value=( + MVSCmdResponse( + rc=0, + stdout=ICETOOL_stdout(52), + stderr=ICETOOL_stderr() + ) + ) + ) + local_catalog_utils._execute_dfhccutl = MagicMock( + return_value=MVSCmdResponse(rc=0, stdout="", stderr=CCUTL_stderr(NAME)) + ) + + lcd_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr="", + ), + _execution( + name=ICETOOL_name(1), + rc=0, + stdout=ICETOOL_stdout(52), + stderr=ICETOOL_stderr() + ), + _execution( + name=IDCAMS_delete_run_name(1, NAME), + rc=0, + stdout=IDCAMS_delete(NAME), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr="", + ), + _execution( + name=IDCAMS_create_run_name(1, 
NAME), + rc=0, + stdout=NAME, + stderr="", + ), + _execution( + name=CCUTL_name(), + rc=0, + stdout="", + stderr=CCUTL_stderr(NAME) + ), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr="", + ), + ], + start_state=dict( + exists=True, + data_set_organization="VSAM" + ), + end_state=dict( + exists=True, + data_set_organization="VSAM" + ), + changed=True, + failed=False, + msg="", + ) + assert lcd_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_remove_non_existent_local_catalog(): + lcd_module = initialise_module(state="absent") + + data_set_utils._execute_listds = MagicMock( + return_value=MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(NAME), "") + ) + + lcd_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr="", + ), + ], + start_state=dict( + exists=False, + data_set_organization="NONE" + ), + end_state=dict( + exists=False, + data_set_organization="NONE" + ), + changed=False, + failed=False, + msg="", + ) + assert lcd_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_warm_start_a_local_catalog(): + lcd_module = initialise_module(state="warm") + + data_set_utils._execute_listds = MagicMock( + return_value=MVSCmdResponse( + 0, + LISTDS_data_set(NAME, "VSAM"), + "" + ) + ) + icetool._execute_icetool = MagicMock( + return_value=MVSCmdResponse( + rc=0, + stdout=ICETOOL_stdout(52), + stderr=ICETOOL_stderr() + ) + ) + + lcd_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr=""), + _execution( + 
name=ICETOOL_name(1), + rc=0, + stdout=ICETOOL_stdout(52), + stderr=ICETOOL_stderr(), + ), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr=""), + ], + start_state=dict( + exists=True, + data_set_organization="VSAM" + ), + end_state=dict( + exists=True, + data_set_organization="VSAM" + ), + changed=False, + failed=False, + msg="", + ) + assert lcd_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_error_warm_start_a_unused_local_catalog(): + lcd_module = initialise_module(state="warm") + + data_set_utils._execute_listds = MagicMock( + return_value=MVSCmdResponse( + 0, + LISTDS_data_set(NAME, "VSAM"), + "" + ) + ) + icetool._execute_icetool = MagicMock( + return_value=MVSCmdResponse( + rc=0, + stdout=ICETOOL_stdout(0), + stderr=ICETOOL_stderr() + ) + ) + lcd_module.main() + + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr="" + ), + _execution( + name=ICETOOL_name(1), + rc=0, + stdout=ICETOOL_stdout(0), + stderr=ICETOOL_stderr() + ), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr="" + ), + ], + start_state=dict( + exists=True, + data_set_organization="VSAM" + ), + end_state=dict( + exists=True, + data_set_organization="VSAM" + ), + changed=False, + failed=True, + msg="Data set {0} is empty.".format(NAME), + ) + assert lcd_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_error_warm_start_a_non_existent_local_catalog(): + lcd_module = initialise_module(state="warm") + + data_set_utils._execute_listds = MagicMock(return_value=MVSCmdResponse( + 8, LISTDS_data_set_doesnt_exist(NAME), "")) + + lcd_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + 
rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr="" + ), + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr="" + ), + ], + start_state=dict( + exists=False, + data_set_organization="NONE" + ), + end_state=dict( + exists=False, + data_set_organization="NONE" + ), + failed=True, + changed=False, + msg="Data set {0} does not exist.".format(NAME), + ) + assert lcd_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_bad_response_from_ccutl(): + lcd_module = initialise_module() + + data_set_utils._execute_idcams = MagicMock( + return_value=MVSCmdResponse(0, NAME, "") + ) + data_set_utils._execute_listds = MagicMock( + side_effect=[ + MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(NAME), ""), + MVSCmdResponse(0, LISTDS_data_set(NAME, "VSAM"), ""), + ] + ) + local_catalog_utils._execute_dfhccutl = MagicMock( + return_value=MVSCmdResponse(rc=99, stdout="", stderr=CCUTL_stderr(NAME)) + ) + + lcd_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr="" + ), + _execution( + name=IDCAMS_create_run_name(1, NAME), + rc=0, + stdout=NAME, + stderr="", + ), + _execution( + name=CCUTL_name(), + rc=99, + stdout="", + stderr=CCUTL_stderr(NAME) + ), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr="", + ), + ], + start_state=dict( + exists=False, + data_set_organization="NONE" + ), + end_state=dict( + exists=True, + data_set_organization="VSAM" + ), + changed=True, + failed=True, + msg="DFHCCUTL failed with RC 99", + ) + assert lcd_module.get_result() == expected_result diff --git a/tests/unit/modules/test_local_request_queue.py b/tests/unit/modules/test_local_request_queue.py new file mode 100644 index 00000000..e71dc6a8 --- /dev/null +++ 
b/tests/unit/modules/test_local_request_queue.py @@ -0,0 +1,368 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) +from __future__ import absolute_import, division, print_function +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils import _data_set_utils as data_set_utils, _icetool as icetool +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._response import _execution +from ansible_collections.ibm.ibm_zos_cics.tests.unit.helpers.data_set_helper import ( + PYTHON_LANGUAGE_FEATURES_MESSAGE, + ICETOOL_name, + ICETOOL_stderr, + ICETOOL_stdout, + IDCAMS_delete_run_name, + IDCAMS_delete, + IDCAMS_create_run_name, + LISTDS_data_set_doesnt_exist, + LISTDS_data_set, + LISTDS_run_name, + set_module_args +) +from ansible_collections.ibm.ibm_zos_cics.plugins.modules import local_request_queue +import pytest +import sys + +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.zos_mvs_raw import MVSCmdResponse + +try: + from unittest.mock import MagicMock +except ImportError: + from mock import MagicMock + + +__metaclass__ = type + +NAME = "TEST.REGIONS.LRQ" + +default_arg_parms = { + "space_primary": 5, + "space_secondary": 3, + "space_type": "M", + "region_data_sets": { + "dfhlrq": { + "dsn": NAME + } + }, + "state": "initial", +} + + +def initialise_module(**kwargs): + initial_args = default_arg_parms + initial_args.update(kwargs) + set_module_args(initial_args) + lrq_module = local_request_queue.AnsibleLocalRequestQueueModule() + lrq_module._module.fail_json = MagicMock(return_value=None) + lrq_module._module.exit_json = MagicMock(return_value=None) + return lrq_module + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_create_an_intial_local_request_queue(): + lrq_module = initialise_module() + + data_set_utils._execute_idcams = MagicMock( + return_value=MVSCmdResponse(0, NAME, "")) + 
data_set_utils._execute_listds = MagicMock( + side_effect=[ + MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(NAME), ""), + MVSCmdResponse(0, LISTDS_data_set(NAME, "VSAM"), ""), + ] + ) + + lrq_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr="", + ), + _execution( + name=IDCAMS_create_run_name(1, NAME), + rc=0, + stdout=NAME, + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr="", + ), + ], + start_state=dict( + exists=False, + data_set_organization="NONE" + ), + end_state=dict( + exists=True, + data_set_organization="VSAM" + ), + changed=True, + failed=False, + msg="", + ) + assert lrq_module.result == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_delete_an_existing_local_request_queue(): + lrq_module = initialise_module(state="absent") + + data_set_utils._execute_idcams = MagicMock( + return_value=MVSCmdResponse(0, IDCAMS_delete(NAME), "") + ) + data_set_utils._execute_listds = MagicMock( + side_effect=[ + MVSCmdResponse(0, LISTDS_data_set(NAME, "VSAM"), ""), + MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(NAME), ""), + ] + ) + + lrq_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr="", + ), + _execution( + name=IDCAMS_delete_run_name(1, NAME), + rc=0, + stdout=IDCAMS_delete(NAME), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr="", + ), + ], + start_state=dict( + exists=True, + data_set_organization="VSAM" + ), + end_state=dict( + exists=False, + data_set_organization="NONE" + ), + changed=True, + failed=False, + msg="", + ) + assert lrq_module.result == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, 
reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_delete_an_existing_lrq_and_replace(): + lrq_module = initialise_module() + + data_set_utils._execute_listds = MagicMock( + side_effect=[ + MVSCmdResponse(0, LISTDS_data_set(NAME, "VSAM"), ""), + MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(NAME), ""), + MVSCmdResponse(0, LISTDS_data_set(NAME, "VSAM"), ""), + ] + ) + data_set_utils._execute_idcams = MagicMock( + side_effect=[ + MVSCmdResponse(0, IDCAMS_delete(NAME), ""), + MVSCmdResponse(0, NAME, ""), + ] + ) + icetool._execute_icetool = MagicMock( + return_value=( + MVSCmdResponse( + rc=0, + stdout=ICETOOL_stdout(52), + stderr=ICETOOL_stderr() + ) + ) + ) + + lrq_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr="", + ), + _execution( + name=ICETOOL_name(1), + rc=0, + stdout=ICETOOL_stdout(52), + stderr=ICETOOL_stderr() + ), + _execution( + name=IDCAMS_delete_run_name(1, NAME), + rc=0, + stdout=IDCAMS_delete(NAME), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr="", + ), + _execution( + name=IDCAMS_create_run_name(1, NAME), + rc=0, + stdout=NAME, + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr="", + ), + ], + start_state=dict( + exists=True, + data_set_organization="VSAM" + ), + end_state=dict( + exists=True, + data_set_organization="VSAM" + ), + changed=True, + failed=False, + msg="", + ) + assert lrq_module.result == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_remove_non_existent_lrq(): + lrq_module = initialise_module(state="absent") + + data_set_utils._execute_listds = MagicMock( + return_value=MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(NAME), "") + ) + + lrq_module.main() + expected_result = dict( + executions=[ + _execution( + 
name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr="", + ), + ], + start_state=dict( + exists=False, + data_set_organization="NONE" + ), + end_state=dict( + exists=False, + data_set_organization="NONE" + ), + changed=False, + failed=False, + msg="", + ) + assert lrq_module.result == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_warm_on_non_existent_lrq(): + lrq_module = initialise_module(state="warm") + + data_set_utils._execute_listds = MagicMock(return_value=MVSCmdResponse( + 8, LISTDS_data_set_doesnt_exist(NAME), "")) + + lrq_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr=""), + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr=""), + ], + start_state=dict( + exists=False, + data_set_organization="NONE" + ), + end_state=dict( + exists=False, + data_set_organization="NONE" + ), + changed=False, + failed=True, + msg="Data set {0} does not exist.".format(NAME), + ) + assert lrq_module.result == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_warm_success_lrq(): + lrq_module = initialise_module(state="warm") + + data_set_utils._execute_listds = MagicMock( + return_value=MVSCmdResponse( + 0, + LISTDS_data_set(NAME, "VSAM"), + "" + ) + ) + + lrq_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr=""), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr=""), + ], + start_state=dict( + exists=True, + data_set_organization="VSAM" + ), + end_state=dict( + exists=True, + 
data_set_organization="VSAM" + ), + changed=False, + failed=False, + msg="", + ) + assert lrq_module.result == expected_result diff --git a/tests/unit/modules/test_region_jcl.py b/tests/unit/modules/test_region_jcl.py new file mode 100644 index 00000000..90d7ba38 --- /dev/null +++ b/tests/unit/modules/test_region_jcl.py @@ -0,0 +1,2010 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) +import json + +from _pytest.monkeypatch import MonkeyPatch +from ansible.module_utils.common.text.converters import to_bytes +from ansible.module_utils import basic +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils import _data_set_utils +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._response import _execution +from ansible_collections.ibm.ibm_zos_cics.plugins.modules.region_jcl import ( + AnsibleRegionJCLModule as StartCICSModule, DFHSIP, PGM, DISP, DSN, SHR +) +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._jcl_helper import ( + NAME, DDS +) +from ansible_collections.ibm.ibm_zos_cics.tests.unit.helpers.data_set_helper import ( + IDCAMS_delete, + IDCAMS_delete_run_name, + IEFBR14_create_stderr, + LISTDS_data_set, + LISTDS_data_set_doesnt_exist, + LISTDS_member_doesnt_exist, + LISTDS_run_name, + LISTSDS_member_data_set, + get_sample_generated_JCL, + get_sample_generated_JCL_args +) +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.zos_mvs_raw import MVSCmdResponse +import pytest +import sys + +try: + from unittest.mock import MagicMock +except ImportError: + from mock import MagicMock + + +DS_NAME = "TEST.DATA.START" + +default_arg_parms = { + "state": "initial", + "applid": "APPLID", + "cics_data_sets": {}, + "le_data_sets": {}, + "output_data_sets": {}, + "region_data_sets": { + "dfhstart": { + "dsn": DS_NAME + } + } +} + + +def set_module_args(args): + """prepare arguments so that they will be picked up during module creation""" 
+ args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) + basic._ANSIBLE_ARGS = to_bytes(args) + + +def setup_and_update_parms(args): + parms = default_arg_parms + parms.update(args) + set_module_args(parms) + dfhsip = StartCICSModule() + dfhsip._remove_none_values_from_dict(dfhsip._module.params) + return dfhsip + + +def prepare_for_fail(): + monkey_patch = MonkeyPatch() + monkey_patch.setattr(basic.AnsibleModule, "fail_json", fail_json) + + +def prepare_for_exit(): + monkey_patch = MonkeyPatch() + monkey_patch.setattr(basic.AnsibleModule, "exit_json", exit_json) + + +def fail_json(*args, **kwargs): + """function to patch over fail_json; package return data into an exception""" + kwargs['failed'] = True + raise AnsibleFailJson(kwargs) + + +def exit_json(*args, **kwargs): + """function to patch over exit_json; package return data into an exception""" + kwargs['failed'] = False + + +class AnsibleFailJson(Exception): + """Exception class to be raised by module.fail_json and caught by the test case""" + pass + + +def test_populate_job_card_dict_with_job_name(): + module = setup_and_update_parms({ + "job_parameters": {"job_name": "STRTCICS"}}) + module._populate_job_card_dict() + assert module.jcl_helper.job_data['job_card'] == {"job_name": "STRTCICS"} + + +def test_populate_job_card_dict_without_job_name(): + module = setup_and_update_parms({}) + module._populate_job_card_dict() + assert module.jcl_helper.job_data['job_card'] == {"job_name": "APPLID"} + + +def test_populate_job_card_dict_without_job_name_but_with_region(): + module = setup_and_update_parms({ + "job_parameters": {"region": "0M"}}) + module._populate_job_card_dict() + assert module.jcl_helper.job_data['job_card'] == {"job_name": "APPLID", "region": "0M"} + + +def test_add_exec_parameters(): + module = setup_and_update_parms({}) + module._add_exec_parameters({NAME: "", PGM: DFHSIP, DDS: {}}) + print(module.jcl_helper.job_data['execs']) + assert module.jcl_helper.job_data['execs'] == [{'name': '', 'pgm': 
DFHSIP, 'dds': {}}] + + +def test_add_exec_parameters_with_sit_parameters(): + module = setup_and_update_parms({"sit_parameters": {"start": "COLD"}}) + module._add_exec_parameters({NAME: "", PGM: DFHSIP, DDS: {}}) + print(module.jcl_helper.job_data['execs']) + assert module.jcl_helper.job_data['execs'] == [{'name': '', 'pgm': DFHSIP, 'dds': {}, 'PARM': 'SI'}] + + +def test_add_block_of_libraries_empty_libraries(): + module = setup_and_update_parms({"steplib": {"top_data_sets": [], "data_sets": []}}) + module._add_block_of_libraries("steplib") + assert module.dds == [] + + +def test_add_block_of_libraries_empty_top_data_sets(): + module = setup_and_update_parms({"steplib": {"top_data_sets": [], "data_sets": ["LIB.ONE"]}}) + module._add_block_of_libraries("steplib") + assert module.dds == [{"steplib": [{DISP: SHR, DSN: "LIB.ONE"}]}] + + +def test_add_block_of_libraries_dict_is_none(): + set_module_args(default_arg_parms) + dfhsip = StartCICSModule() + dfhsip._module.params.pop("steplib") + dfhsip._add_block_of_libraries("steplib") + assert dfhsip.dds == [] + + +def test_get_delimiter_when_dlm_not_needed(): + set_module_args(default_arg_parms) + dfhsip = StartCICSModule() + dlm = dfhsip._get_delimiter(["value1=one", "value2=two", "value3=three"]) + assert dlm is None + + +def test_get_delimiter(): + set_module_args(default_arg_parms) + dfhsip = StartCICSModule() + dlm = dfhsip._get_delimiter(["value1=one", "value2=two/*", "value3=three"]) + assert dlm == "@@" + + +def test_find_unused_character(): + content = [ + "Hello" + "instream_content", + ] + dlm = StartCICSModule._find_unused_character(content) + assert dlm == "@@" + + +def test_find_unused_character_with_some_preferred_chars_used(): + content = [ + "Hello", + "instream_content", + "@@$$##" + ] + dlm = StartCICSModule._find_unused_character(content) + assert dlm == "@$" + + +def test_find_unused_character_with_all_preferred_chars_used(): + content = [ + "Hello", + "instream_content", + "@@$$##@#$@" + ] + dlm = 
StartCICSModule._find_unused_character(content) + assert dlm == "@$" + + +def test_find_unused_character_with_preferred_chars_and_first_combinations_used(): + content = [ + "Hello", + "AAABBBCC" + "instream_content", + "@@$$##@#$@" + ] + dlm = StartCICSModule._find_unused_character(content) + assert dlm == "@$" + + +def test_find_unused_character_with_preferred_chars_used(): + content = [ + "Hello", + "instream_content", + "@@", + "$$", + "##", + "@#", + "$@", + "#@", + "@$", + "$#", + "#$"] + dlm = StartCICSModule._find_unused_character(content) + assert dlm == "@A" + + +def test_validate_content(): + prepare_for_exit() + set_module_args(default_arg_parms) + module = StartCICSModule() + content = ["LISTCAT ENTRIES('SOME.data_set.*')", + "LISTCAT ENTRIES('SOME.OTHER.DS.*')"] + module._validate_content(content) + module._exit() + assert module.result["failed"] is False + + +def test_validate_content_with_passing_jcl(): + prepare_for_exit() + set_module_args(default_arg_parms) + module = StartCICSModule() + content = ["//TEST DD DISP=SHR,DSN=TEST.DATA"] + module._validate_content(content) + module._exit() + assert module.result["failed"] is False + + +def test_validate_content_with_invalid_content_dd_data(): + prepare_for_fail() + set_module_args(default_arg_parms) + module = StartCICSModule() + content = ["//TEST DD DISP=SHR,DSN=TEST.DATA", "//TEST2 DD DATA"] + with pytest.raises(AnsibleFailJson) as exec_info: + module._validate_content(content) + expected = "Invalid content for an in-stream: DD DATA" + assert exec_info.value.args[0]['msg'] == expected + assert module.result["failed"] is True + + +def test_validate_content_with_invalid_content_DD_instream(): + prepare_for_fail() + set_module_args(default_arg_parms) + module = StartCICSModule() + content = ["//TEST DD DISP=SHR,DSN=TEST.DATA", "//TEST2 DD *"] + with pytest.raises(AnsibleFailJson) as exec_info: + module._validate_content(content) + expected = "Invalid content for an in-stream: DD *" + assert 
exec_info.value.args[0]['msg'] == expected + assert module.result["failed"] is True + + +def test_check_for_existing_dlm_within_content_true(): + set_module_args(default_arg_parms) + module = StartCICSModule() + content = ["RUN PROGRAM", "/* RUNNING */"] + end_stream_present = module._check_for_existing_dlm_within_content( + content) + assert end_stream_present is True + + +def test_check_for_existing_dlm_within_content_falce(): + set_module_args(default_arg_parms) + module = StartCICSModule() + content = ["RUN PROGRAM", "HELLO WORLD"] + end_stream_present = module._check_for_existing_dlm_within_content( + content) + assert end_stream_present is False + + +@pytest.mark.skipif(sys.version_info.major < 3, reason="Requires python 3 language features") +def test_add_output_data_sets_with_global_default(): + module = setup_and_update_parms({ + "output_data_sets": {"default_sysout_class": "A"}}) + module._add_output_data_sets() + assert module.dds == [{"ceemsg": [{"sysout": "A"}]}, {"ceeout": [{"sysout": "A"}]}, {"msgusr": [{"sysout": "A"}]}, + {"sysprint": [{"sysout": "A"}]}, {"sysudump": [{"sysout": "A"}]}, + {"sysabend": [{"sysout": "A"}]}, {"sysout": [{"sysout": "A"}]}, + {"dfhcxrf": [{"sysout": "A"}]}, {"logusr": [{"sysout": "A"}]}] + + +@pytest.mark.skipif(sys.version_info.major < 3, reason="Requires python 3 language features") +def test_add_output_data_sets_without_global_default(): + module = setup_and_update_parms({}) + module._add_output_data_sets() + assert module.dds == [{"ceemsg": [{"sysout": "*"}]}, {"ceeout": [{"sysout": "*"}]}, {"msgusr": [{"sysout": "*"}]}, + {"sysprint": [{"sysout": "*"}]}, {"sysudump": [{"sysout": "*"}]}, + {"sysabend": [{"sysout": "*"}]}, {"sysout": [{"sysout": "*"}]}, + {"dfhcxrf": [{"sysout": "*"}]}, {"logusr": [{"sysout": "*"}]}] + + +@pytest.mark.skipif(sys.version_info.major < 3, reason="Requires python 3 language features") +def test_add_output_data_sets_with_overrides_and_global_default(): + module = 
setup_and_update_parms({"output_data_sets": { + "default_sysout_class": "A", "ceemsg": {"sysout": "B"}, + "logusr": {"sysout": "*"} + }}) + module._add_output_data_sets() + assert module.dds == [{"ceemsg": [{"sysout": "B"}]}, {"logusr": [{"sysout": "*"}]}, {"ceeout": [{"sysout": "A"}]}, + {"msgusr": [{"sysout": "A"}]}, {"sysprint": [{"sysout": "A"}]}, + {"sysudump": [{"sysout": "A"}]}, {"sysabend": [{"sysout": "A"}]}, + {"sysout": [{"sysout": "A"}]}, {"dfhcxrf": [{"sysout": "A"}]}] + + +@pytest.mark.skipif(sys.version_info.major < 3, reason="Requires python 3 language features") +def test_add_output_data_sets_with_overrides_and_omit(): + module = setup_and_update_parms({"output_data_sets": { + "default_sysout_class": "A", "ceemsg": {"sysout": "B"}, + "logusr": {"omit": True} + }}) + module._add_output_data_sets() + assert module.dds == [{"ceemsg": [{"sysout": "B"}]}, {"ceeout": [{"sysout": "A"}]}, + {"msgusr": [{"sysout": "A"}]}, {"sysprint": [{"sysout": "A"}]}, + {"sysudump": [{"sysout": "A"}]}, {"sysabend": [{"sysout": "A"}]}, + {"sysout": [{"sysout": "A"}]}, {"dfhcxrf": [{"sysout": "A"}]}] + + +@pytest.mark.skipif(sys.version_info.major < 3, reason="Requires python 3 language features") +def test_add_output_data_sets_without_global_default_and_with_override(): + module = setup_and_update_parms({"output_data_sets": {"ceemsg": {"sysout": "B"}, "logusr": {"omit": True}}}) + module._add_output_data_sets() + assert module.dds == [{"ceemsg": [{"sysout": "B"}]}, {"ceeout": [{"sysout": "*"}]}, {"msgusr": [{"sysout": "*"}]}, + {"sysprint": [{"sysout": "*"}]}, {"sysudump": [{"sysout": "*"}]}, + {"sysabend": [{"sysout": "*"}]}, {"sysout": [{"sysout": "*"}]}, + {"dfhcxrf": [{"sysout": "*"}]}] + + +def test_remove_omited_data_set(): + module = setup_and_update_parms({}) + data_set = "ceeout" + user_data_sets = {data_set: {"omit": True}} + module._remove_omitted_data_set(data_set, user_data_sets) + assert user_data_sets == {} + + +def 
test_remove_omited_data_set_with_omit_false(): + module = setup_and_update_parms({}) + data_set = "ceeout" + user_data_sets = {data_set: {"omit": False}} + module._remove_omitted_data_set(data_set, user_data_sets) + assert user_data_sets == {"ceeout": {"omit": False}} + + +def test_remove_omited_data_set_not_present(): + data_set = "ceemsg" + module = setup_and_update_parms({}) + user_data_sets = {"ceeout": {"omit": True}} + module._remove_omitted_data_set(data_set, user_data_sets) + assert user_data_sets == {"ceeout": {"omit": True}} + + +def test_set_sysout_class_for_data_set_no_override(): + module = setup_and_update_parms({}) + data_set = "ceemsg" + default_class = "A" + user_data_sets = {'ceeout': {'sysout': 'B'}} + module._set_sysout_class_for_data_set( + data_set, default_class, user_data_sets) + assert user_data_sets == {"ceeout": { + "sysout": "B"}, data_set: {"sysout": default_class}} + + +def test_set_sysout_class_for_data_set_with_override(): + module = setup_and_update_parms({}) + data_set = "ceemsg" + default_class = "A" + user_data_sets = {"ceemsg": {"sysout": "B"}} + module._set_sysout_class_for_data_set( + data_set, default_class, user_data_sets) + assert user_data_sets == {"ceemsg": {"sysout": "B"}} + + +def test_add_per_region_data_sets(): + module = setup_and_update_parms({"region_data_sets": {"dfhcsd": {"dsn": "TEST.DATA.DFHCSD"}, + "dfhtemp": {"dsn": "TEST.DATA.DFHTEMP"}, + "dfhstart": {"dsn": DS_NAME}}}) + module._add_per_region_data_sets() + assert module.dds == [{"dfhcsd": [{"dsn": "TEST.DATA.DFHCSD", "disp": "SHR"}]}, + {"dfhtemp": [{"dsn": "TEST.DATA.DFHTEMP", "disp": "SHR"}]}] + + +def test_add_libraries(): + module = setup_and_update_parms({}) + dsn_dict = module._add_libraries(["LIB.ONE", "LIB.TWO", "LIB.THREE"]) + assert dsn_dict == [{"dsn": "LIB.ONE", "disp": "SHR"}, + {"dsn": "LIB.TWO", "disp": "SHR"}, + {"dsn": "LIB.THREE", "disp": "SHR"}] + + +def test_add_libraries_with_none_value(): + module = setup_and_update_parms({}) + 
dsn_dict = module._add_libraries(["LIB.ONE", None, "LIB.THREE"]) + assert dsn_dict == [{"dsn": "LIB.ONE", "disp": "SHR"}, + {"dsn": "LIB.THREE", "disp": "SHR"}] + + +def test_add_libraries_with_none_passed(): + module = setup_and_update_parms({}) + dsn_dict = module._add_libraries([]) + assert dsn_dict == [] + + +@pytest.mark.skipif(sys.version_info.major < 3, reason="Requires python 3 language features") +def test_add_sit_parameters(): + module = setup_and_update_parms({"sit_parameters": {}}) + # All sit parms have been added automatically and set as None. + module._module.params["sit_parameters"]["AICONS"] = "AUTO" + module._add_sit_parameters() + assert module.dds == [{"sysin": {"content": ["AICONS=AUTO", "APPLID=APPLID"]}}] + + +@pytest.mark.skipif(sys.version_info.major < 3, reason="Requires python 3 language features") +def test_add_sit_parameters_with_dictionaries_in_sit_parms(): + module = setup_and_update_parms({"sit_parameters": {}}) + # All sit parms have been added automatically and set as None. + module._module.params["sit_parameters"]["AICONS"] = "AUTO" + module._module.params["sit_parameters"]["strnxx"] = { + "ap": "VAL1", "aq": "VAL2"} + + module._add_sit_parameters() + print(module.dds) + assert module.dds == [ + {"sysin": {"content": ["AICONS=AUTO", "APPLID=APPLID", "STRNAP=VAL1", "STRNAQ=VAL2"]}}] + + +def test_add_sit_parameters_when_none(): + module = setup_and_update_parms({"sit_parameters": None}) + # All sit parms have been added automatically and set as None. 
+ module.dds = [] + module._add_sit_parameters() + assert module.dds == [] + + +def test_manage_dictionaries_in_sit_parameters(): + dictionary_of_values = {"Param1": "value1", + "paramxxx": {"VAL": "TRUE", "NUM": "TWO"}} + module = setup_and_update_parms({}) + module._manage_dictionaries_in_sit_parameters(dictionary_of_values) + assert dictionary_of_values == { + "Param1": "value1", "paramVAL": "TRUE", "paramNUM": "TWO"} + + +def test_validate_dictionary_value_within_sit_parms(): + prepare_for_exit() + string_with_trailing_x = "paramxx" + value = "ap" + module = setup_and_update_parms({}) + module._validate_dictionary_value_within_sit_parms( + string_with_trailing_x, value) + module._exit() + assert module.result["failed"] is False + + +def test_validate_dictionary_value_within_sit_parms_skr_4_letters(): + prepare_for_exit() + string_with_trailing_x = "SKRXXXX" + value = "PA24" + module = setup_and_update_parms({}) + module._validate_dictionary_value_within_sit_parms( + string_with_trailing_x, value) + module._exit() + assert module.result["failed"] is False + + +def test_validate_dictionary_value_within_sit_parms_skr_3_letters(): + prepare_for_exit() + string_with_trailing_x = "SKRXXXX" + value = "PA1" + module = setup_and_update_parms({}) + module._validate_dictionary_value_within_sit_parms( + string_with_trailing_x, value) + module._exit() + assert module.result["failed"] is False + + +def test_validate_dictionary_value_within_sit_parms_skr_5_letters(): + string_with_trailing_x = "SKRXXXX" + value = "PA015" + prepare_for_fail() + module = setup_and_update_parms({}) + with pytest.raises(AnsibleFailJson) as exec_info: + module._validate_dictionary_value_within_sit_parms( + string_with_trailing_x, value) + expected = "Invalid key: PA015. Key must be a length of 3 or 4." 
+ assert exec_info.value.args[0]['msg'] == expected + assert module.result["failed"] is True + + +def test_validate_dictionary_value_within_sit_parms_value_length_doesnt_match_trailing_x(): + string_with_trailing_x = "STRNRXXX" + value = "VAL2" + prepare_for_fail() + module = setup_and_update_parms({}) + with pytest.raises(AnsibleFailJson) as exec_info: + module._validate_dictionary_value_within_sit_parms( + string_with_trailing_x, value) + expected = "Invalid key: VAL2. Key must be the same length as the x's within STRNRXXX." + assert exec_info.value.args[0]['msg'] == expected + assert module.result["failed"] is True + + +def test_remove_none_values_from_dict(): + module = setup_and_update_parms({}) + # All sit parms have been added automatically and set as None. + # Assert they've been added. + arg_spec = {"sit_parameters": {"one": 1, "two": 2}, + "data_sets": {"auxt": {"disp": "SHR", "omit": None}, "buxt": None}} + module._remove_none_values_from_dict(arg_spec) + assert arg_spec == {"sit_parameters": {"one": 1, "two": 2}, + "data_sets": {"auxt": {"disp": "SHR"}}} + + +def test_check_parameter_is_provided(): + module = setup_and_update_parms({}) + assert module._check_parameter_is_provided("applid") is True + + +def test_check_parameter_is_provided_when_its_absent(): + dfhsip = setup_and_update_parms({}) + assert dfhsip._check_parameter_is_provided("dfhcsd") is False + + +def test_fail(): + prepare_for_fail() + module = setup_and_update_parms({}) + expected_message = "Module failed for test" + with pytest.raises(AnsibleFailJson) as message: + module._fail(expected_message) + assert message.value.args[0]['msg'] == expected_message + assert module.result["failed"] + + +def test_concat_libraries_both_provided(): + prepare_for_exit() + data_sets = ["FIRST.LIB", "SECOND.LIB"] + top_data_sets = ["FIRST.TOP.LIB", "SECOND.TOP.LIB"] + module = setup_and_update_parms({ + "steplib": {"data_sets": data_sets, "top_data_sets": top_data_sets}}) + concat_libs = 
module._concat_libraries("steplib") + assert concat_libs == top_data_sets + data_sets + + +def test_concat_libraries_only_first_provided(): + prepare_for_exit() + top_data_sets = ["FIRST.TOP.LIB", "SECOND.TOP.LIB"] + module = setup_and_update_parms({ + "steplib": {"data_sets": None, "top_data_sets": top_data_sets}}) + concat_libs = module._concat_libraries("steplib") + assert concat_libs == top_data_sets + + +def test_concat_libraries_only_second_provided(): + prepare_for_exit() + data_sets = ["FIRST.LIB", "SECOND.LIB"] + module = setup_and_update_parms({ + "steplib": {"data_sets": data_sets, "top_data_sets": None}}) + concat_libs = module._concat_libraries("steplib") + assert concat_libs == data_sets + + +def test_concat_libraries_none_provided(): + module = setup_and_update_parms({ + "steplib": {"data_sets": None, "top_data_sets": None}}) + concat_libs = module._concat_libraries("steplib") + assert concat_libs == [] + + +@pytest.mark.skipif(sys.version_info.major < 3, reason="Requires python 3 language features") +def test_copy_libraries_to_steplib_and_dfhrpl(): + module = setup_and_update_parms({ + "region_data_sets": { + 'dfhauxt': {DSN: "TEST.CICSPY1.RDEV.DFHAUXT"}, + 'dfhbuxt': {DSN: "TEST.CICSPY1.RDEV.DFHBUXT"}, + 'dfhcsd': {DSN: "TEST.CICSPY1.RDEV.DFHCSD"}, + 'dfhgcd': {DSN: "TEST.CICSPY1.RDEV.DFHGCD"}, + 'dfhintra': {DSN: "TEST.CICSPY1.RDEV.DFHINTRA"}, + 'dfhlcd': {DSN: "TEST.CICSPY1.RDEV.DFHLCD"}, + 'dfhlrq': {DSN: "TEST.CICSPY1.RDEV.DFHLRQ"}, + 'dfhtemp': {DSN: "TEST.CICSPY1.RDEV.DFHTEMP"}, + 'dfhdmpa': {DSN: "TEST.CICSPY1.RDEV.DFHDMPA"}, + 'dfhdmpb': {DSN: "TEST.CICSPY1.RDEV.DFHDMPB"}, + "dfhstart": {DSN: DS_NAME} + }, + "cics_data_sets": { + "sdfhload": "TEST.CICS.SDFHLOAD", + "sdfhauth": "TEST.CICS.SDFHAUTH", + "sdfhlic": "TEST.CICS.SDFHLIC", + "template": "TEST.CICS.<< lib_name >>" + }, + "le_data_sets": { + "sceecics": "TEST.LE.SCEECICS", + "sceerun": "TEST.LE.SCEERUN", + "sceerun2": "TEST.LE.SCEERUN2", + "template": "TEST.LE.<< lib_name >>" + }, + 
@pytest.mark.skipif(sys.version_info.major < 3, reason="Requires python 3 language features")
def test__populate_dds():
    """_populate_dds emits the libraries first (STEPLIB then DFHRPL, user top
    data sets ahead of the product libraries), then the region data sets,
    then the SYSOUT-style DD statements, with data set names upper-cased."""
    module = setup_and_update_parms({
        "region_data_sets": {
            "dfhauxt": {DSN: "test.dfhauxt"},
            "dfhbuxt": {DSN: "test.dfhbuxt"},
            "dfhcsd": {DSN: "test.dfhcsd"},
            "dfhgcd": {DSN: "test.dfhgcd"},
            "dfhintra": {DSN: "test.dfhintra"},
            "dfhlcd": {DSN: "test.dfhlcd"},
            "dfhlrq": {DSN: "test.dfhlrq"},
            "dfhtemp": {DSN: "test.dfhtemp"},
            "dfhdmpa": {DSN: "test.dfhdmpa"},
            "dfhdmpb": {DSN: "test.dfhdmpb"},
            "dfhstart": {DSN: DS_NAME},
        },
        "cics_data_sets": {
            "sdfhload": "test.sdfhload",
            "sdfhauth": "test.sdfhauth",
            "sdfhlic": "test.sdfhlic",
        },
        "le_data_sets": {
            "sceecics": "test.sceecics",
            "sceerun": "test.sceerun",
            "sceerun2": "test.sceerun2",
        },
        "cpsm_data_sets": {
            "seyuauth": "test.seyuauth",
            "seyuload": "test.seyuload",
        },
        "steplib": {"top_data_sets": ["some.top.lib"]},
        "dfhrpl": {"top_data_sets": ["another.top.lib"]},
    })

    actual_dds = module._populate_dds()

    def shr(*names):
        # Helper: DISP=SHR DD entries in the given order.
        return [{"disp": "SHR", "dsn": name} for name in names]

    expected_dds = [
        {"steplib": shr("SOME.TOP.LIB", "TEST.SDFHAUTH", "TEST.SDFHLIC",
                        "TEST.SEYUAUTH", "TEST.SCEERUN", "TEST.SCEERUN2")},
        {"dfhrpl": shr("ANOTHER.TOP.LIB", "TEST.SDFHLOAD", "TEST.SEYULOAD",
                       "TEST.SCEECICS", "TEST.SCEERUN", "TEST.SCEERUN2")},
    ]
    for dd_name in ("dfhauxt", "dfhbuxt", "dfhcsd", "dfhgcd", "dfhintra",
                    "dfhlcd", "dfhlrq", "dfhtemp", "dfhdmpa", "dfhdmpb"):
        expected_dds.append({dd_name: shr("TEST." + dd_name.upper())})
    for sysout_dd in ("ceemsg", "ceeout", "msgusr", "sysprint", "sysudump",
                      "sysabend", "sysout", "dfhcxrf", "logusr"):
        expected_dds.append({sysout_dd: [{"sysout": "*"}]})

    assert actual_dds == expected_dds


def test_validate_parameters_job_name_too_long():
    """A job name longer than the qualifier limit must be rejected."""
    prepare_for_fail()
    job_name = "TOOOOLONGGGJOB"
    with pytest.raises(AnsibleFailJson) as err_info:
        module = setup_and_update_parms({"job_parameters": {"job_name": job_name}})
        # NOTE(review): unreachable when setup raises as expected; kept from original.
        assert module.result["failed"]
    assert err_info.value.args[0]["msg"] == 'Invalid argument "{0}" for type "qualifier".'.format(job_name)


def test_validate_parameters_job_name():
    """A short, valid job name passes validation and the module exits cleanly."""
    prepare_for_exit()
    module = setup_and_update_parms({"job_parameters": {"job_name": "STRTJOB"}})
    module._exit()
    assert not module.result["failed"]
def test_validate_parameters_ds():
    """A data set name within the qualifier limits passes validation."""
    prepare_for_exit()
    module = setup_and_update_parms({"cics_data_sets": {"sdfhauth": "DATASET.DATA"}})
    module._exit()
    assert not module.result["failed"]


def test_validate_parameters_applid_too_long():
    """An APPLID longer than the qualifier limit fails validation."""
    prepare_for_fail()
    applid = "APPLIDTOOLONG"
    with pytest.raises(AnsibleFailJson) as err_info:
        module = setup_and_update_parms({"applid": applid})
        # NOTE(review): unreachable when setup raises as expected; kept from original.
        assert module.result["failed"]
    assert err_info.value.args[0]["msg"] == 'Invalid argument "{0}" for type "qualifier".'.format(applid)


def test_validate_parameters_steplib_library_too_long():
    """A STEPLIB top data set with an over-long qualifier is rejected."""
    prepare_for_fail()
    steplib = "LIB.TOOO.LONGQUALIFIER"
    with pytest.raises(AnsibleFailJson) as err_info:
        module = setup_and_update_parms({"steplib": {"top_data_sets": [steplib]}})
        # NOTE(review): unreachable when setup raises as expected; kept from original.
        assert module.result["failed"]
    assert err_info.value.args[0]["msg"] == 'Invalid argument "{0}" for type "data_set_base".'.format(steplib)


def test_validate_parameters_region_ds_too_long():
    """A region data set name with an over-long qualifier is rejected."""
    prepare_for_fail()
    region_ds = "LIB.TOOO.LONGQUALIFIER"
    with pytest.raises(AnsibleFailJson) as err_info:
        module = setup_and_update_parms({
            "region_data_sets": {"dfhcsd": {"dsn": region_ds}, "dfhstart": {"dsn": DS_NAME}}
        })
        # NOTE(review): unreachable when setup raises as expected; kept from original.
        assert module.result["failed"]
    assert err_info.value.args[0]["msg"] == 'Invalid argument "{0}" for type "data_set_base".'.format(region_ds)


def test_wrap_sit_parameters_no_wrapping():
    """Parameters that already fit on one line come back untouched."""
    sit_parms = ["USSHOME=HOMEDIR", "START=INITIAL", "APPLID=ABC123"]
    assert StartCICSModule._wrap_sit_parameters(sit_parms) == sit_parms
def test_wrap_sit_parameters_wrapping_two_parms():
    """Two over-long SIT parameters are each split at the 80-column boundary;
    parameters that fit pass through unchanged."""
    long_usshome = ("USSHOME=LONGHOMEDIRECTORYLONGERTHAN80CHARACTERSNEEDSTOBE"
                    "WRAPPEDBYTHEWRAPPINGMETHOD")
    long_gmtext = ("GMTEXT='GOOD MORNING USER, WELCOME TO YOUR CICS REGION. "
                   "THIS IS A LONG MESSAGE FOR TEST.'")
    wrapped = StartCICSModule._wrap_sit_parameters(
        [long_usshome, "START=INITIAL", long_gmtext, "APPLID=ABC123"]
    )
    # Both long parameters are 80-plus characters, so each splits into an
    # 80-character head and a short tail.
    assert wrapped == [
        long_usshome[:80],
        long_usshome[80:],
        "START=INITIAL",
        long_gmtext[:80],
        long_gmtext[80:],
        "APPLID=ABC123",
    ]


def test_wrap_sit_parameters_wrapping_one_parm():
    """Only the over-long parameter is wrapped; the rest pass through."""
    long_usshome = ("USSHOME=LONGHOMEDIRECTORYLONGERTHAN80CHARACTERSNEEDSTOBE"
                    "WRAPPEDBYTHEWRAPPINGMETHOD")
    wrapped = StartCICSModule._wrap_sit_parameters(
        [long_usshome, "START=INITIAL", "APPLID=ABC123"]
    )
    assert wrapped == [long_usshome[:80], long_usshome[80:], "START=INITIAL", "APPLID=ABC123"]


def test_find_sit_parm_key():
    """The key is the text before the first '=' (value may be unterminated)."""
    assert StartCICSModule._find_sit_parm_key("GMTEXT='HELLO") == "GMTEXT"


def test_find_sit_parm_key_two_equals():
    """Only the first '=' delimits the key."""
    assert StartCICSModule._find_sit_parm_key("GMTEXT='HELLO=HOWAREYOU'") == "GMTEXT"


def test_find_sit_parm_key_not_present():
    """No '=' means no key is found."""
    assert StartCICSModule._find_sit_parm_key("HELLO") is None
assert module.primary_unit == "CYL" + assert module.primary == 10 + assert module.secondary_unit == "CYL" + assert module.secondary == 5 + + +def test_calculate_size_parameters_no_overrides(): + module = setup_and_update_parms({}) + + assert module.unit == "M" + assert module.primary == 1 + assert module.primary_unit == "" + assert module.secondary == 1 + assert module.secondary_unit == "" + + module.generate_jcl() + module.calculate_size_parameters() + + assert module.unit == "M" + assert module.primary_unit == "K" + assert module.primary == 2 + assert module.secondary_unit == "K" + assert module.secondary == 1 + + +def test_calculate_size_parameters_primary_override_only(): + module = setup_and_update_parms({"space_primary": 10}) + + assert module.unit == "M" + assert module.primary == 10 + assert module.primary_unit == "" + assert module.secondary == 1 + assert module.secondary_unit == "" + + module.generate_jcl() + module.calculate_size_parameters() + + assert module.unit == "M" + assert module.primary_unit == "M" + assert module.primary == 10 + assert module.secondary_unit == "K" + assert module.secondary == 1 + + +def test_calculate_size_parameters_secondary_override_only(): + module = setup_and_update_parms({"space_secondary": 10}) + + assert module.unit == "M" + assert module.primary == 1 + assert module.primary_unit == "" + assert module.secondary == 10 + assert module.secondary_unit == "" + + module.generate_jcl() + module.calculate_size_parameters() + + assert module.unit == "M" + assert module.primary_unit == "K" + assert module.primary == 2 + assert module.secondary_unit == "M" + assert module.secondary == 10 + + +def test_calculate_size_parameters_primary_override_and_units_only(): + module = setup_and_update_parms({ + "space_primary": 10, + "space_type": "CYL" + }) + + assert module.unit == "CYL" + assert module.primary == 10 + assert module.primary_unit == "" + assert module.secondary == 1 + assert module.secondary_unit == "" + + 
def test_calculate_size_parameters_primary_override_and_secondary_only():
    """Overriding both sizes (secondary supplied as a string) resolves both
    to the module unit."""
    module = setup_and_update_parms({"space_primary": 10, "space_secondary": "5"})

    assert (module.unit, module.primary, module.primary_unit) == ("M", 10, "")
    assert (module.secondary, module.secondary_unit) == (5, "")

    module.generate_jcl()
    module.calculate_size_parameters()

    assert module.unit == "M"
    assert (module.primary, module.primary_unit) == (10, "M")
    assert (module.secondary, module.secondary_unit) == (5, "M")


def test_calculate_size_parameters_secondary_override_and_units_only():
    """Overriding the unit and only the secondary size: the secondary takes
    the new unit while the primary resolves to 2K."""
    module = setup_and_update_parms({"space_secondary": 10, "space_type": "CYL"})

    assert (module.unit, module.primary, module.primary_unit) == ("CYL", 1, "")
    assert (module.secondary, module.secondary_unit) == (10, "")

    module.generate_jcl()
    module.calculate_size_parameters()

    assert module.unit == "CYL"
    assert (module.primary, module.primary_unit) == (2, "K")
    assert (module.secondary, module.secondary_unit) == (10, "CYL")


def test_calculate_size_parameters_units_override_only():
    """Overriding only the unit without sizes leaves both sizes resolved to
    2K primary / 1K secondary."""
    module = setup_and_update_parms({"space_type": "CYL"})

    assert (module.unit, module.primary, module.primary_unit) == ("CYL", 1, "")
    assert (module.secondary, module.secondary_unit) == (1, "")

    module.generate_jcl()
    module.calculate_size_parameters()

    assert module.unit == "CYL"
    assert (module.primary, module.primary_unit) == (2, "K")
    assert (module.secondary, module.secondary_unit) == (1, "K")
def test_check_member_not_member():
    """A plain data set name (no parenthesised member) is treated as sequential."""
    module = setup_and_update_parms({
        "region_data_sets": {"dfhstart": {"dsn": "TEST.DATA.PDSE"}}
    })
    module.check_member()

    assert module.member is False
    assert module.get_expected_ds_org() == "Sequential"


def test_initial_state():
    """state=initial on a missing data set: create it with IEFBR14, copy the
    generated JCL in, and report a Sequential data set at the end."""
    prepare_for_exit()
    jcl_module = setup_and_update_parms(get_sample_generated_JCL_args(DS_NAME, "initial"))

    # First LISTDS: data set absent; second (after creation): present as PS.
    _data_set_utils._execute_listds = MagicMock(side_effect=[
        MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(DS_NAME), ""),
        MVSCmdResponse(0, LISTDS_data_set(DS_NAME, "PS"), ""),
    ])
    _data_set_utils._execute_iefbr14 = MagicMock(
        return_value=MVSCmdResponse(rc=0, stdout="", stderr=IEFBR14_create_stderr(DS_NAME, "DFHSTART"))
    )
    _data_set_utils._execute_command = MagicMock(return_value=(0, "", ""))

    jcl_module.main()

    assert jcl_module.get_result() == {
        "executions": [
            _execution(name=LISTDS_run_name(1), rc=8, stdout=LISTDS_data_set_doesnt_exist(DS_NAME), stderr=""),
            _execution(name="IEFBR14 - dfhstart - Run 1", rc=0, stdout="", stderr=IEFBR14_create_stderr(DS_NAME, "DFHSTART")),
            _execution(name="Copy JCL contents to data set", rc=0, stdout="", stderr=""),
            _execution(name=LISTDS_run_name(1), rc=0, stdout=LISTDS_data_set(DS_NAME, "PS"), stderr=""),
        ],
        "start_state": {"exists": False, "data_set_organization": "NONE"},
        "end_state": {"exists": True, "data_set_organization": "Sequential"},
        "changed": True,
        "failed": False,
        "msg": "",
        "jcl": get_sample_generated_JCL(),
    }
MagicMock( + side_effect=[ + MVSCmdResponse(0, LISTDS_data_set(DS_NAME, "PS"), ""), + MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(DS_NAME), ""), + MVSCmdResponse(0, LISTDS_data_set(DS_NAME, "PS"), ""), + ] + ) + _data_set_utils._execute_idcams = MagicMock( + return_value=MVSCmdResponse(0, IDCAMS_delete(DS_NAME), "") + ) + _data_set_utils._execute_iefbr14 = MagicMock( + return_value=MVSCmdResponse( + rc=0, stdout="", stderr=IEFBR14_create_stderr(DS_NAME, "DFHSTART") + ) + ) + + _data_set_utils._execute_command = MagicMock(return_value=(0, "", "")) + + region_jcl_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(DS_NAME, "PS"), + stderr="", + ), + _execution( + name=IDCAMS_delete_run_name(1, DS_NAME), + rc=0, + stdout=IDCAMS_delete(DS_NAME), + stderr="" + ), + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(DS_NAME), + stderr="", + ), + _execution( + name="IEFBR14 - dfhstart - Run 1", + rc=0, + stdout="", + stderr=IEFBR14_create_stderr(DS_NAME, "DFHSTART") + ), + _execution( + name="Copy JCL contents to data set", + rc=0, + stdout="", + stderr="" + ), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(DS_NAME, "PS"), + stderr="", + ), + ], + start_state=dict( + exists=True, + data_set_organization="Sequential" + ), + end_state=dict( + exists=True, + data_set_organization="Sequential" + ), + changed=True, + failed=False, + msg="", + jcl=get_sample_generated_JCL() + ) + assert region_jcl_module.get_result() == expected_result + + +def test_warm_state_match(): + prepare_for_exit() + region_jcl_module = setup_and_update_parms(get_sample_generated_JCL_args(DS_NAME, "warm")) + + _data_set_utils._execute_listds = MagicMock( + return_value=MVSCmdResponse(0, LISTDS_data_set(DS_NAME, "PS"), "") + ) + _data_set_utils._execute_command = MagicMock(return_value=(0, get_sample_generated_JCL(), "")) + + region_jcl_module.main() + 
def test_warm_state_not_existing():
    """state=warm fails when the start-up JCL data set does not exist."""
    prepare_for_fail()
    jcl_module = setup_and_update_parms(get_sample_generated_JCL_args(DS_NAME, "warm"))

    _data_set_utils._execute_listds = MagicMock(
        return_value=MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(DS_NAME), ""),
    )

    with pytest.raises(AnsibleFailJson):
        jcl_module.main()

    assert jcl_module.get_result() == {
        "executions": [
            _execution(name=LISTDS_run_name(1), rc=8, stdout=LISTDS_data_set_doesnt_exist(DS_NAME), stderr=""),
        ],
        "start_state": {"exists": False, "data_set_organization": "NONE"},
        "end_state": {"exists": False, "data_set_organization": "NONE"},
        "changed": False,
        "failed": True,
        "msg": "Data set TEST.DATA.START does not exist.",
        "jcl": "",
    }
def test_absent_state_pre_existing():
    """state=absent removes an existing data set via IDCAMS and reports it gone."""
    prepare_for_exit()
    jcl_module = setup_and_update_parms(get_sample_generated_JCL_args(DS_NAME, "absent"))

    # First LISTDS: data set present as PS; second (after delete): absent.
    _data_set_utils._execute_listds = MagicMock(side_effect=[
        MVSCmdResponse(0, LISTDS_data_set(DS_NAME, "PS"), ""),
        MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(DS_NAME), ""),
    ])
    _data_set_utils._execute_idcams = MagicMock(
        return_value=MVSCmdResponse(0, IDCAMS_delete(DS_NAME), "")
    )

    jcl_module.main()

    assert jcl_module.get_result() == {
        "executions": [
            _execution(name=LISTDS_run_name(1), rc=0, stdout=LISTDS_data_set(DS_NAME, "PS"), stderr=""),
            _execution(name=IDCAMS_delete_run_name(1, DS_NAME), rc=0, stdout=IDCAMS_delete(DS_NAME), stderr=""),
            _execution(name=LISTDS_run_name(1), rc=8, stdout=LISTDS_data_set_doesnt_exist(DS_NAME), stderr=""),
        ],
        "start_state": {"exists": True, "data_set_organization": "Sequential"},
        "end_state": {"exists": False, "data_set_organization": "NONE"},
        "changed": True,
        "failed": False,
        "msg": "",
        "jcl": "",
    }
expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(DS_NAME), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(DS_NAME), + stderr="", + ), + ], + start_state=dict( + exists=False, + data_set_organization="NONE" + ), + end_state=dict( + exists=False, + data_set_organization="NONE" + ), + changed=False, + failed=False, + msg="", + jcl="" + ) + assert region_jcl_module.get_result() == expected_result + + +def test_initial_state_member(): + prepare_for_exit() + BASE_DS = "TEST.DATA" + MEMBER_NAME = "START" + MEMBER_DS_NAME = "{0}({1})".format(BASE_DS, MEMBER_NAME) + region_jcl_module = setup_and_update_parms(get_sample_generated_JCL_args(MEMBER_DS_NAME, "initial")) + + _data_set_utils._execute_listds = MagicMock( + side_effect=[ + MVSCmdResponse(0, LISTDS_data_set(BASE_DS, "PO"), ""), + MVSCmdResponse(4, LISTDS_member_doesnt_exist(BASE_DS, MEMBER_NAME), ""), + MVSCmdResponse(0, LISTDS_data_set(BASE_DS, "PO"), ""), + MVSCmdResponse(0, LISTSDS_member_data_set(BASE_DS, MEMBER_NAME), "") + ] + ) + + _data_set_utils._execute_command = MagicMock(return_value=(0, "", "")) + + region_jcl_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(BASE_DS, "PO"), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=4, + stdout=LISTDS_member_doesnt_exist(BASE_DS, MEMBER_NAME), + stderr="", + ), + _execution( + name="Copy JCL contents to data set", + rc=0, + stdout="", + stderr="" + ), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(BASE_DS, "PO"), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTSDS_member_data_set(BASE_DS, MEMBER_NAME), + stderr="", + ), + ], + start_state=dict( + exists=False, + data_set_organization="NONE" + ), + end_state=dict( + exists=True, + data_set_organization="Partitioned" + ), + 
def test_initial_state_member_base_not_exisitng():
    """state=initial for a member must fail when the base PDS/E is missing."""
    prepare_for_fail()
    base_ds = "TEST.DATA"
    member = "START"
    member_ds = "{0}({1})".format(base_ds, member)
    jcl_module = setup_and_update_parms(get_sample_generated_JCL_args(member_ds, "initial"))

    _data_set_utils._execute_listds = MagicMock(side_effect=[
        MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(base_ds), ""),
        MVSCmdResponse(4, LISTDS_member_doesnt_exist(base_ds, member), ""),
    ])

    with pytest.raises(AnsibleFailJson):
        jcl_module.main()

    assert jcl_module.get_result() == {
        "executions": [
            _execution(name=LISTDS_run_name(1), rc=8, stdout=LISTDS_data_set_doesnt_exist(base_ds), stderr=""),
            _execution(name=LISTDS_run_name(1), rc=4, stdout=LISTDS_member_doesnt_exist(base_ds, member), stderr=""),
        ],
        "start_state": {"exists": False, "data_set_organization": "NONE"},
        "end_state": {"exists": False, "data_set_organization": "NONE"},
        "changed": False,
        "failed": True,
        # NOTE(review): message re-joined across a mangled line break — confirm single space.
        "msg": "Base data set TEST.DATA does not exist. Can only create a member in an existing PDS/E",
        "jcl": get_sample_generated_JCL(),
    }
stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTSDS_member_data_set(BASE_DS, MEMBER_NAME), + stderr="", + ), + ], + start_state=dict( + exists=True, + data_set_organization="Partitioned" + ), + end_state=dict( + exists=True, + data_set_organization="Partitioned" + ), + changed=True, + failed=False, + msg="", + jcl=get_sample_generated_JCL() + ) + assert region_jcl_module.get_result() == expected_result + + +def test_warm_state_match_member(): + prepare_for_exit() + BASE_DS = "TEST.DATA" + MEMBER_NAME = "START" + MEMBER_DS_NAME = "{0}({1})".format(BASE_DS, MEMBER_NAME) + region_jcl_module = setup_and_update_parms(get_sample_generated_JCL_args(MEMBER_DS_NAME, "warm")) + + _data_set_utils._execute_listds = MagicMock( + side_effect=[ + MVSCmdResponse(0, LISTDS_data_set(BASE_DS, "PO"), ""), + MVSCmdResponse(0, LISTSDS_member_data_set(BASE_DS, MEMBER_NAME), ""), + MVSCmdResponse(0, LISTDS_data_set(BASE_DS, "PO"), ""), + MVSCmdResponse(0, LISTSDS_member_data_set(BASE_DS, MEMBER_NAME), "") + ] + ) + _data_set_utils._execute_command = MagicMock(return_value=(0, get_sample_generated_JCL(), "")) + + region_jcl_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(BASE_DS, "PO"), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTSDS_member_data_set(BASE_DS, MEMBER_NAME), + stderr="", + ), + _execution( + name="Read data set {0}".format(MEMBER_DS_NAME), + rc=0, + stdout=get_sample_generated_JCL(), + stderr="" + ), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(BASE_DS, "PO"), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTSDS_member_data_set(BASE_DS, MEMBER_NAME), + stderr="", + ), + ], + start_state=dict( + exists=True, + data_set_organization="Partitioned" + ), + end_state=dict( + exists=True, + data_set_organization="Partitioned" + ), + changed=False, + failed=False, + msg="", + 
def test_warm_state_not_existing_member():
    """state=warm fails when the base PDS/E exists but the member does not."""
    prepare_for_fail()
    base_ds = "TEST.DATA"
    member = "START"
    member_ds = "{0}({1})".format(base_ds, member)
    jcl_module = setup_and_update_parms(get_sample_generated_JCL_args(member_ds, "warm"))

    _data_set_utils._execute_listds = MagicMock(side_effect=[
        MVSCmdResponse(0, LISTDS_data_set(base_ds, "PO"), ""),
        MVSCmdResponse(4, LISTDS_member_doesnt_exist(base_ds, member), ""),
    ])

    with pytest.raises(AnsibleFailJson):
        jcl_module.main()

    assert jcl_module.get_result() == {
        "executions": [
            _execution(name=LISTDS_run_name(1), rc=0, stdout=LISTDS_data_set(base_ds, "PO"), stderr=""),
            _execution(name=LISTDS_run_name(1), rc=4, stdout=LISTDS_member_doesnt_exist(base_ds, member), stderr=""),
        ],
        "start_state": {"exists": False, "data_set_organization": "NONE"},
        "end_state": {"exists": False, "data_set_organization": "NONE"},
        "changed": False,
        "failed": True,
        "msg": "Data set TEST.DATA(START) does not exist.",
        "jcl": "",
    }
), + ], + start_state=dict( + exists=False, + data_set_organization="NONE" + ), + end_state=dict( + exists=False, + data_set_organization="NONE" + ), + changed=False, + failed=True, + msg="Data set TEST.DATA(START) does not exist.", + jcl="" + ) + with pytest.raises(AnsibleFailJson): + region_jcl_module.main() + + assert region_jcl_module.get_result() == expected_result + + +def test_warm_state_non_match_member(): + prepare_for_fail() + BASE_DS = "TEST.DATA" + MEMBER_NAME = "START" + MEMBER_DS_NAME = "{0}({1})".format(BASE_DS, MEMBER_NAME) + region_jcl_module = setup_and_update_parms(get_sample_generated_JCL_args(MEMBER_DS_NAME, "warm")) + + _data_set_utils._execute_listds = MagicMock( + side_effect=[ + MVSCmdResponse(0, LISTDS_data_set(BASE_DS, "PO"), ""), + MVSCmdResponse(0, LISTSDS_member_data_set(BASE_DS, MEMBER_NAME), "") + ] + ) + _data_set_utils._execute_command = MagicMock(return_value=(0, "NON MATHCING JCL", "")) + + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(BASE_DS, "PO"), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTSDS_member_data_set(BASE_DS, MEMBER_NAME), + stderr="", + ), + _execution( + name="Read data set {0}".format(MEMBER_DS_NAME), + rc=0, + stdout="NON MATHCING JCL", + stderr="" + ), + ], + start_state=dict( + exists=True, + data_set_organization="Partitioned" + ), + end_state=dict( + exists=True, + data_set_organization="Partitioned" + ), + changed=False, + failed=True, + msg="Data set TEST.DATA(START) does not contain the expected Region JCL.", + jcl=get_sample_generated_JCL() + ) + with pytest.raises(AnsibleFailJson): + region_jcl_module.main() + + assert region_jcl_module.get_result() == expected_result + + +def test_absent_state_pre_existing_member(): + prepare_for_exit() + BASE_DS = "TEST.DATA" + MEMBER_NAME = "START" + MEMBER_DS_NAME = "{0}({1})".format(BASE_DS, MEMBER_NAME) + region_jcl_module = 
setup_and_update_parms(get_sample_generated_JCL_args(MEMBER_DS_NAME, "absent")) + + _data_set_utils._execute_listds = MagicMock( + side_effect=[ + MVSCmdResponse(0, LISTDS_data_set(BASE_DS, "PO"), ""), + MVSCmdResponse(0, LISTSDS_member_data_set(BASE_DS, MEMBER_NAME), ""), + MVSCmdResponse(0, LISTDS_data_set(BASE_DS, "PO"), ""), + MVSCmdResponse(4, LISTDS_member_doesnt_exist(BASE_DS, MEMBER_NAME), ""), + ] + ) + _data_set_utils._execute_idcams = MagicMock( + return_value=MVSCmdResponse(0, IDCAMS_delete(MEMBER_DS_NAME), "") + ) + + region_jcl_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(BASE_DS, "PO"), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTSDS_member_data_set(BASE_DS, MEMBER_NAME), + stderr="", + ), + _execution( + name=IDCAMS_delete_run_name(1, MEMBER_DS_NAME), + rc=0, + stdout=IDCAMS_delete(MEMBER_DS_NAME), + stderr="" + ), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(BASE_DS, "PO"), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=4, + stdout=LISTDS_member_doesnt_exist(BASE_DS, MEMBER_NAME), + stderr="", + ), + ], + start_state=dict( + exists=True, + data_set_organization="Partitioned" + ), + end_state=dict( + exists=False, + data_set_organization="NONE" + ), + changed=True, + failed=False, + msg="", + jcl="" + ) + assert region_jcl_module.get_result() == expected_result + + +def test_absent_state_not_existing_member(): + prepare_for_exit() + BASE_DS = "TEST.DATA" + MEMBER_NAME = "START" + MEMBER_DS_NAME = "{0}({1})".format(BASE_DS, MEMBER_NAME) + region_jcl_module = setup_and_update_parms(get_sample_generated_JCL_args(MEMBER_DS_NAME, "absent")) + + _data_set_utils._execute_listds = MagicMock( + side_effect=[ + MVSCmdResponse(0, LISTDS_data_set(BASE_DS, "PO"), ""), + MVSCmdResponse(4, LISTDS_member_doesnt_exist(BASE_DS, MEMBER_NAME), ""), + MVSCmdResponse(0, LISTDS_data_set(BASE_DS, "PO"), ""), + 
MVSCmdResponse(4, LISTDS_member_doesnt_exist(BASE_DS, MEMBER_NAME), ""), + ] + ) + + region_jcl_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(BASE_DS, "PO"), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=4, + stdout=LISTDS_member_doesnt_exist(BASE_DS, MEMBER_NAME), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(BASE_DS, "PO"), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=4, + stdout=LISTDS_member_doesnt_exist(BASE_DS, MEMBER_NAME), + stderr="", + ), + ], + start_state=dict( + exists=False, + data_set_organization="NONE" + ), + end_state=dict( + exists=False, + data_set_organization="NONE" + ), + changed=False, + failed=False, + msg="", + jcl="" + ) + assert region_jcl_module.get_result() == expected_result + + +def test_absent_state_not_existing_member_and_base(): + prepare_for_exit() + BASE_DS = "TEST.DATA" + MEMBER_NAME = "START" + MEMBER_DS_NAME = "{0}({1})".format(BASE_DS, MEMBER_NAME) + region_jcl_module = setup_and_update_parms(get_sample_generated_JCL_args(MEMBER_DS_NAME, "absent")) + + _data_set_utils._execute_listds = MagicMock( + side_effect=[ + MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(BASE_DS), ""), + MVSCmdResponse(4, LISTDS_member_doesnt_exist(BASE_DS, MEMBER_NAME), ""), + MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(BASE_DS), ""), + MVSCmdResponse(4, LISTDS_member_doesnt_exist(BASE_DS, MEMBER_NAME), ""), + ] + ) + + region_jcl_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(BASE_DS), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=4, + stdout=LISTDS_member_doesnt_exist(BASE_DS, MEMBER_NAME), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(BASE_DS), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=4, + 
stdout=LISTDS_member_doesnt_exist(BASE_DS, MEMBER_NAME), + stderr="", + ), + ], + start_state=dict( + exists=False, + data_set_organization="NONE" + ), + end_state=dict( + exists=False, + data_set_organization="NONE" + ), + changed=False, + failed=False, + msg="", + jcl="" + ) + assert region_jcl_module.get_result() == expected_result diff --git a/tests/unit/modules/test_stop_region.py b/tests/unit/modules/test_stop_region.py new file mode 100644 index 00000000..ccbbc8f1 --- /dev/null +++ b/tests/unit/modules/test_stop_region.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from mock import MagicMock +from ansible_collections.ibm.ibm_zos_cics.plugins.modules.stop_region import ( + AnsibleStopCICSModule as stop_region, SDTRAN +) + +from ansible_collections.ibm.ibm_zos_cics.tests.unit.helpers.data_set_helper import ( + set_module_args +) +default_arg_parms = { + "job_id": "ANS12345", + "mode": "normal" +} + + +def initialise_module(**kwargs): + initial_args = default_arg_parms + initial_args.update(kwargs) + set_module_args(initial_args) + stop_module = stop_region() + # Mock Ansible module fail and exits, this prevents sys.exit being called but retains an accurate results + stop_module._module.fail_json = MagicMock(return_value=None) + stop_module._module.exit_json = MagicMock(return_value=None) + return stop_module + + +def test__validate_sdtran(): + stop_module = initialise_module() + stop_module._module.params[SDTRAN] = "CESD" + stop_module.main() + assert stop_module.failed is False + + +def test__validate_sdtran_3_chars(): + stop_module = initialise_module() + stop_module._module.params[SDTRAN] = "C$D" + stop_module.main() + assert stop_module.failed is False + + +def test__validate_sdtran_numerical(): + stop_module = initialise_module() + stop_module._module.params[SDTRAN] = "1234" + stop_module.main() + assert stop_module.failed is False + + +def 
test__validate_sdtran_too_long(): + stop_module = initialise_module() + stop_module._module.params[SDTRAN] = "CESDS" + stop_module.main() + assert stop_module.failed + assert ( + stop_module.msg + == "Value: CESDS, is invalid. SDTRAN value must be 1-4 characters." + ) diff --git a/tests/unit/modules/test_td_intrapartition.py b/tests/unit/modules/test_td_intrapartition.py new file mode 100644 index 00000000..da2b66a9 --- /dev/null +++ b/tests/unit/modules/test_td_intrapartition.py @@ -0,0 +1,364 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2023,2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) +from __future__ import absolute_import, division, print_function +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils import _data_set_utils as data_set_utils, _icetool as icetool +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._response import _execution +from ansible_collections.ibm.ibm_zos_cics.tests.unit.helpers.data_set_helper import ( + PYTHON_LANGUAGE_FEATURES_MESSAGE, + ICETOOL_name, + ICETOOL_stderr, + ICETOOL_stdout, + IDCAMS_delete_run_name, + IDCAMS_delete, + IDCAMS_create_run_name, + LISTDS_data_set_doesnt_exist, + LISTDS_data_set, + LISTDS_run_name, + set_module_args +) +from ansible_collections.ibm.ibm_zos_cics.plugins.modules import td_intrapartition +import pytest +import sys + +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.zos_mvs_raw import MVSCmdResponse + +try: + from unittest.mock import MagicMock +except ImportError: + from mock import MagicMock + + +__metaclass__ = type + +NAME = "TEST.REGIONS.INTRA" + +default_arg_parms = { + "space_primary": 5, + "space_secondary": 3, + "space_type": "M", + "region_data_sets": {"dfhintra": {"dsn": NAME}}, + "state": "initial", +} + + +def initialise_module(**kwargs): + initial_args = default_arg_parms + initial_args.update(kwargs) + set_module_args(initial_args) + intra_module = td_intrapartition.AnsibleTDIntrapartitionModule() 
+ intra_module._module.fail_json = MagicMock(return_value=None) + intra_module._module.exit_json = MagicMock(return_value=None) + return intra_module + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_create_an_intial_td_intrapartition_ds(): + intra_module = initialise_module() + + data_set_utils._execute_idcams = MagicMock( + return_value=MVSCmdResponse(0, NAME, "")) + data_set_utils._execute_listds = MagicMock( + side_effect=[ + MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(NAME), ""), + MVSCmdResponse(0, LISTDS_data_set(NAME, "VSAM"), ""), + ] + ) + + intra_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr="", + ), + _execution( + name=IDCAMS_create_run_name(1, NAME), + rc=0, + stdout=NAME, + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr="", + ), + ], + start_state=dict( + exists=False, + data_set_organization="NONE" + ), + end_state=dict( + exists=True, + data_set_organization="VSAM" + ), + changed=True, + failed=False, + msg="", + ) + assert intra_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_delete_an_existing_td_intrapartition_ds(): + intra_module = initialise_module(state="absent") + + data_set_utils._execute_idcams = MagicMock( + return_value=MVSCmdResponse(0, IDCAMS_delete(NAME), "") + ) + data_set_utils._execute_listds = MagicMock( + side_effect=[ + MVSCmdResponse(0, LISTDS_data_set(NAME, "VSAM"), ""), + MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(NAME), ""), + ] + ) + + intra_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr="", + ), + _execution( + name=IDCAMS_delete_run_name(1, NAME), + rc=0, + 
stdout=IDCAMS_delete(NAME), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr="", + ), + ], + start_state=dict( + exists=True, + data_set_organization="VSAM" + ), + end_state=dict( + exists=False, + data_set_organization="NONE" + ), + changed=True, + failed=False, + msg="", + ) + assert intra_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_delete_an_existing_intra_and_replace(): + intra_module = initialise_module() + + data_set_utils._execute_listds = MagicMock( + side_effect=[ + MVSCmdResponse(0, LISTDS_data_set(NAME, "VSAM"), ""), + MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(NAME), ""), + MVSCmdResponse(0, LISTDS_data_set(NAME, "VSAM"), ""), + ] + ) + data_set_utils._execute_idcams = MagicMock( + side_effect=[ + MVSCmdResponse(0, IDCAMS_delete(NAME), ""), + MVSCmdResponse(0, NAME, ""), + ] + ) + icetool._execute_icetool = MagicMock( + return_value=( + MVSCmdResponse( + rc=0, + stdout=ICETOOL_stdout(52), + stderr=ICETOOL_stderr() + ) + ) + ) + + intra_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr="", + ), + _execution( + name=ICETOOL_name(1), + rc=0, + stdout=ICETOOL_stdout(52), + stderr=ICETOOL_stderr() + ), + _execution( + name=IDCAMS_delete_run_name(1, NAME), + rc=0, + stdout=IDCAMS_delete(NAME), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr="", + ), + _execution( + name=IDCAMS_create_run_name(1, NAME), + rc=0, + stdout=NAME, + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr="", + ), + ], + start_state=dict( + exists=True, + data_set_organization="VSAM" + ), + end_state=dict( + exists=True, + data_set_organization="VSAM" + ), + changed=True, + 
failed=False, + msg="", + ) + assert intra_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_remove_non_existent_intra(): + intra_module = initialise_module(state="absent") + + data_set_utils._execute_listds = MagicMock( + return_value=MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(NAME), "") + ) + + intra_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr="", + ), + ], + start_state=dict( + exists=False, + data_set_organization="NONE" + ), + end_state=dict( + exists=False, + data_set_organization="NONE" + ), + changed=False, + failed=False, + msg="", + ) + assert intra_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_warm_on_non_existent_intra(): + intra_module = initialise_module(state="warm") + + data_set_utils._execute_listds = MagicMock(return_value=MVSCmdResponse( + 8, LISTDS_data_set_doesnt_exist(NAME), "")) + + intra_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr=""), + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAME), + stderr=""), + ], + start_state=dict( + exists=False, + data_set_organization="NONE" + ), + end_state=dict( + exists=False, + data_set_organization="NONE" + ), + changed=False, + failed=True, + msg="Data set {0} does not exist.".format(NAME), + ) + assert intra_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_warm_success_intra(): + intra_module = initialise_module(state="warm") + + 
data_set_utils._execute_listds = MagicMock( + return_value=MVSCmdResponse( + 0, + LISTDS_data_set(NAME, "VSAM"), + "" + ) + ) + + intra_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr=""), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAME, "VSAM"), + stderr=""), + ], + start_state=dict( + exists=True, + data_set_organization="VSAM" + ), + end_state=dict( + exists=True, + data_set_organization="VSAM" + ), + changed=False, + failed=False, + msg="", + ) + assert intra_module.get_result() == expected_result diff --git a/tests/unit/modules/test_transaction_dump.py b/tests/unit/modules/test_transaction_dump.py new file mode 100644 index 00000000..eec703db --- /dev/null +++ b/tests/unit/modules/test_transaction_dump.py @@ -0,0 +1,344 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) +from __future__ import absolute_import, division, print_function +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils import _data_set_utils as data_set_utils +from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._response import _execution +from ansible_collections.ibm.ibm_zos_cics.tests.unit.helpers.data_set_helper import ( + PYTHON_LANGUAGE_FEATURES_MESSAGE, + IDCAMS_delete_run_name, + IDCAMS_delete, + IEFBR14_create_stderr, + LISTDS_data_set_doesnt_exist, + LISTDS_data_set, + LISTDS_run_name, + set_module_args +) +from ansible_collections.ibm.ibm_zos_cics.plugins.modules import transaction_dump +import pytest +import sys + +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.zos_mvs_raw import MVSCmdResponse + +try: + from unittest.mock import MagicMock +except ImportError: + from mock import MagicMock + + +__metaclass__ = type + +NAMEA = "TEST.REGIONS.DFHDMPA" +NAMEB = "TEST.REGIONS.DFHDMPB" + +default_arg_parms = { + "space_primary": 
20, + "space_secondary": 3, + "space_type": "M", + "region_data_sets": { + "dfhdmpa": { + "dsn": NAMEA + }, + "dfhdmpb": { + "dsn": NAMEB + } + }, + "state": "initial", + "destination": "A" +} + + +def initialise_module(**kwargs): + initial_args = default_arg_parms + initial_args.update(kwargs) + set_module_args(initial_args) + transaction_dump_module = transaction_dump.AnsibleTransactionDumpModule() + transaction_dump_module._module.fail_json = MagicMock(return_value=None) + transaction_dump_module._module.exit_json = MagicMock(return_value=None) + return transaction_dump_module + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_create_an_intial_transaction_dump(): + transaction_dump_module = initialise_module() + + data_set_utils._execute_listds = MagicMock( + side_effect=[ + MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(NAMEA), ""), + MVSCmdResponse(0, LISTDS_data_set(NAMEA, "PS"), ""), + ] + ) + data_set_utils._execute_iefbr14 = MagicMock( + return_value=MVSCmdResponse( + rc=0, stdout="", stderr=IEFBR14_create_stderr(NAMEA, "DFHDMPA") + ) + ) + + transaction_dump_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAMEA), + stderr="", + ), + _execution( + name="IEFBR14 - dfhdmpa - Run 1", + rc=0, + stdout="", + stderr=IEFBR14_create_stderr(NAMEA, "DFHDMPA") + ), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAMEA, "PS"), + stderr="", + ), + ], + start_state=dict( + exists=False, + data_set_organization="NONE" + ), + end_state=dict( + exists=True, + data_set_organization="Sequential" + ), + changed=True, + failed=False, + msg="", + ) + assert transaction_dump_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_delete_an_existing_transaction_dump(): + transaction_dump_module = 
initialise_module(state="absent") + + data_set_utils._execute_idcams = MagicMock( + return_value=MVSCmdResponse(0, IDCAMS_delete(NAMEA), "") + ) + data_set_utils._execute_listds = MagicMock( + side_effect=[ + MVSCmdResponse(0, LISTDS_data_set(NAMEA, "PS"), ""), + MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(NAMEA), ""), + ] + ) + + transaction_dump_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAMEA, "PS"), + stderr="", + ), + _execution( + name=IDCAMS_delete_run_name(1, NAMEA), + rc=0, + stdout=IDCAMS_delete(NAMEA), + stderr="" + ), + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAMEA), + stderr="", + ), + ], + start_state=dict( + exists=True, + data_set_organization="Sequential" + ), + end_state=dict( + exists=False, + data_set_organization="NONE" + ), + changed=True, + failed=False, + msg="", + ) + assert transaction_dump_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_remove_non_existent_transaction_dump(): + transaction_dump_module = initialise_module(state="absent") + + data_set_utils._execute_listds = MagicMock( + return_value=MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(NAMEA), "") + ) + + transaction_dump_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAMEA), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAMEA), + stderr="", + ), + ], + start_state=dict( + exists=False, + data_set_organization="NONE" + ), + end_state=dict( + exists=False, + data_set_organization="NONE" + ), + changed=False, + failed=False, + msg="", + ) + assert transaction_dump_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) 
+def test_create_an_intial_destination_b_transaction_dump(): + transaction_dump_module = initialise_module(destination="B") + + data_set_utils._execute_listds = MagicMock( + side_effect=[ + MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(NAMEB), ""), + MVSCmdResponse(0, LISTDS_data_set(NAMEB, "PS"), ""), + ] + ) + data_set_utils._execute_iefbr14 = MagicMock( + return_value=MVSCmdResponse( + rc=0, stdout="", stderr=IEFBR14_create_stderr(NAMEB, "DFHDMPB") + ) + ) + + transaction_dump_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAMEB), + stderr="", + ), + _execution( + name="IEFBR14 - dfhdmpb - Run 1", + rc=0, + stdout="", + stderr=IEFBR14_create_stderr(NAMEB, "DFHDMPB")), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAMEB, "PS"), + stderr="", + ), + ], + start_state=dict( + exists=False, + data_set_organization="NONE" + ), + end_state=dict( + exists=True, + data_set_organization="Sequential" + ), + changed=True, + failed=False, + msg="", + ) + assert transaction_dump_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_warm_on_non_existent(): + transaction_dump_module = initialise_module(state="warm") + + data_set_utils._execute_listds = MagicMock( + return_value=MVSCmdResponse(8, LISTDS_data_set_doesnt_exist(NAMEA), "") + ) + + transaction_dump_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAMEA), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=8, + stdout=LISTDS_data_set_doesnt_exist(NAMEA), + stderr="", + ), + ], + start_state=dict( + exists=False, + data_set_organization="NONE" + ), + end_state=dict( + exists=False, + data_set_organization="NONE" + ), + failed=True, + changed=False, + msg="Data set {0} does not exist.".format(NAMEA), + 
) + assert transaction_dump_module.get_result() == expected_result + + +@pytest.mark.skipif( + sys.version_info.major < 3, reason=PYTHON_LANGUAGE_FEATURES_MESSAGE +) +def test_warm_success(): + transaction_dump_module = initialise_module(state="warm") + + data_set_utils._execute_listds = MagicMock( + return_value=MVSCmdResponse(0, LISTDS_data_set(NAMEA, "PS"), "") + ) + + transaction_dump_module.main() + expected_result = dict( + executions=[ + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAMEA, "PS"), + stderr="", + ), + _execution( + name=LISTDS_run_name(1), + rc=0, + stdout=LISTDS_data_set(NAMEA, "PS"), + stderr="", + ), + ], + start_state=dict( + exists=True, + data_set_organization="Sequential" + ), + end_state=dict( + exists=True, + data_set_organization="Sequential" + ), + failed=False, + changed=False, + msg="", + ) + assert transaction_dump_module.get_result() == expected_result diff --git a/tests/unit/plugin_utils/test_module_action_plugin.py b/tests/unit/plugin_utils/test_module_action_plugin.py new file mode 100644 index 00000000..15eb2b8c --- /dev/null +++ b/tests/unit/plugin_utils/test_module_action_plugin.py @@ -0,0 +1,244 @@ +# -*- coding: utf-8 -*- + +# (c) Copyright IBM Corp. 
2024 +# Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +from ansible.parsing.dataloader import DataLoader +from ansible.template import Templar +from ansible_collections.ibm.ibm_zos_cics.plugins.plugin_utils._module_action_plugin import ( + _check_library_override, + _check_region_override, + _remove_region_data_set_args, + _remove_cics_data_set_args, + _process_region_data_set_args, + _validate_data_set_length, + _validate_list_of_data_set_lengths, + _process_libraries_args, + _check_template, + _set_top_libraries_key +) + + +def get_templar(module_args): + loader = DataLoader() + templar = Templar(loader=loader, variables=module_args) + return templar + + +def test__check_region_override(): + args_with_override = {"region_data_sets": {"dfhgcd": {"dsn": "data.set.path"}}} + args_with_template = {"region_data_sets": {"template": "data.set.template.<< data_set_name >>"}} + args_with_both = {"region_data_sets": {"template": "data.set.template.<< data_set_name >>", "dfhgcd": {"dsn": "data.set.path"}}} + + assert _check_region_override(args_with_override, "dfhgcd") is True + assert _check_region_override(args_with_template, "dfhgcd") is False + assert _check_region_override(args_with_both, "dfhgcd") is True + + +def test__check_library_override(): + args_with_override = {"cics_data_sets": {"sdfhload": "data.set.path"}} + args_with_template = {"cics_data_sets": {"template": "data.set.template.<< data_set_name >>"}} + args_with_both = {"cics_data_sets": {"template": "data.set.template.<< data_set_name >>", "sdfhload": "data.set.path"}} + + assert _check_library_override(args_with_override, "cics_data_sets", "sdfhload") is True + assert _check_library_override(args_with_template, "cics_data_sets", "sdfhload") is False + assert _check_library_override(args_with_both, "cics_data_sets", "sdfhload") is True + + +def test__remove_region_data_set_args(): + 
args_with_extra_region_data_sets = {"region_data_sets": {"dfhgcd": {"dsn": "data.set.path"}, "dfhlcd": {"dsn": "data.set.path"}}} + _remove_region_data_set_args(args_with_extra_region_data_sets, "dfhgcd") + + assert "dfhgcd" in list(args_with_extra_region_data_sets["region_data_sets"].keys()) + assert "dfhlcd" not in list(args_with_extra_region_data_sets["region_data_sets"].keys()) + + +def test__remove_cics_data_set_args(): + args_with_extra_cics_data_sets = {"cics_data_sets": {"sdfhload": "data.set.path", "sdfhlic": "data.set.path", "sdfhauth": "data.set.path"}} + _remove_cics_data_set_args(args_with_extra_cics_data_sets, "sdfhload") + + assert "sdfhload" in list(args_with_extra_cics_data_sets["cics_data_sets"].keys()) + assert "sdfhlic" not in list(args_with_extra_cics_data_sets["cics_data_sets"].keys()) + assert "sdfhauth" not in list(args_with_extra_cics_data_sets["cics_data_sets"].keys()) + + +def test__process_region_data_set_args_with_template(): + args_with_template = {"region_data_sets": {"template": "data.set.template.<< data_set_name >>"}} + templar = get_templar(args_with_template) + task_vars = args_with_template + + _process_region_data_set_args(args_with_template, templar, "dfhgcd", task_vars) + + assert "dfhgcd" in list(args_with_template["region_data_sets"].keys()) + assert args_with_template["region_data_sets"]["dfhgcd"] == {"dsn": "data.set.template.DFHGCD"} + + +def test__process_region_data_set_args_without_template(): + args_with_override = {"region_data_sets": {"dfhgcd": {"dsn": "data.set.template.global"}}} + templar = get_templar(args_with_override) + task_vars = args_with_override + + _process_region_data_set_args(args_with_override, templar, "dfhgcd", task_vars) + + assert args_with_override["region_data_sets"]["dfhgcd"]["dsn"] == "data.set.template.global" + + +def test__process_region_data_set_args_without_template_or_override(): + args_with_garbage = {"region_data_sets": {"garbage": "more.garbage"}} + templar = 
get_templar(args_with_garbage) + task_vars = args_with_garbage + + try: + _process_region_data_set_args(args_with_garbage, templar, "dfhgcd", task_vars) + except KeyError as e: + assert e.args[0] == "No template or data set override found for dfhgcd" + else: + assert False + + +def test__validate_data_set_length(): + _validate_data_set_length("DATA.SET.DFHAUXT") + _validate_data_set_length("DATA.SET.TEST.UNITS.SDFHAUTH") + # 44 characters + _validate_data_set_length("TESTDATA.TESTDATA.TESTDATA.TESTDATA.DFHINTRA") + + +def test__validate_data_set_length_too_long(): + ds_name = "data.set.template.long.name.should.fail.global.dfhcsd" + try: + _validate_data_set_length(ds_name) + except ValueError as e: + assert e.args[0] == "Data set: {0} is longer than 44 characters.".format(ds_name) + else: + assert False + + +def test__validate_data_set_length_45_characters(): + ds_name = "testdata.testdata.testdata.tests.dfh.dfhintra" + try: + _validate_data_set_length(ds_name) + except ValueError as e: + assert e.args[0] == "Data set: {0} is longer than 44 characters.".format(ds_name) + else: + assert False + + +def test__validate_list_of_data_set_lengths(): + ds_list = ["testdata.testdata.testdata.tests.dfhcsd", "testdata.testdata.testdata.tests.dfhintra"] + _validate_list_of_data_set_lengths(ds_list) + + +def test__validate_list_of_data_set_lengths_one_too_long(): + ds_list = ["testdata.testdata.testdata.tests.dfhcsd", "testdata.testdata.testdata.tests.intra.dfhintra"] + try: + _validate_list_of_data_set_lengths(ds_list) + except ValueError as e: + assert e.args[0] == "Data set: {0} is longer than 44 characters.".format("testdata.testdata.testdata.tests.intra.dfhintra") + else: + assert False + + +def test__process_libraries_args_with_template(): + args_with_template = {"cics_data_sets": {"template": "data.set.template.<< lib_name >>"}} + templar = get_templar(args_with_template) + task_vars = args_with_template + + _process_libraries_args(args_with_template, templar, 
task_vars, "cics_data_sets", "sdfhload") + + assert "sdfhload" in list(args_with_template["cics_data_sets"].keys()) + assert args_with_template["cics_data_sets"]["sdfhload"] == "data.set.template.SDFHLOAD" + + +def test__process_libraries_args_with_too_long_cics_data_set(): + args_with_template = {"cics_data_sets": {"template": "data.set.template.too.long.for.jcl.rules.<< lib_name >>"}} + templar = get_templar(args_with_template) + task_vars = args_with_template + + try: + _process_libraries_args(args_with_template, templar, task_vars, "cics_data_sets", "sdfhload") + except ValueError as e: + assert e.args[0] == "Data set: data.set.template.too.long.for.jcl.rules.SDFHLOAD is longer than 44 characters." + else: + assert False + assert args_with_template["cics_data_sets"]["sdfhload"] == "data.set.template.too.long.for.jcl.rules.SDFHLOAD" + + +def test__process_libraries_args_with_too_long_le_data_set(): + args_with_template = {"le_data_sets": {"template": "data.set.template.too.long.for.jcl.rules.<< lib_name >>"}} + templar = get_templar(args_with_template) + task_vars = args_with_template + + try: + _process_libraries_args(args_with_template, templar, task_vars, "le_data_sets", "sceecics") + except ValueError as e: + assert e.args[0] == "Data set: data.set.template.too.long.for.jcl.rules.SCEECICS is longer than 44 characters." 
+ else: + assert False + assert args_with_template["le_data_sets"]["sceecics"] == "data.set.template.too.long.for.jcl.rules.SCEECICS" + + +def test__process_libraries_args_without_template(): + args_with_override = {"cics_data_sets": {"sdfhload": "data.set.template.load"}} + templar = get_templar(args_with_override) + task_vars = args_with_override + + _process_libraries_args(args_with_override, templar, task_vars, "cics_data_sets", "sdfhload") + + assert args_with_override["cics_data_sets"]["sdfhload"] == "data.set.template.load" + + +def test__process_libraries_args_without_template_or_override(): + args_with_garbage = {"cics_data_sets": {"garbage": "more.garbage"}} + templar = get_templar(args_with_garbage) + task_vars = args_with_garbage + + try: + _process_libraries_args(args_with_garbage, templar, task_vars, "cics_data_sets", "sdfhload") + except KeyError as e: + assert e.args[0] == "No template or library override found for sdfhload" + else: + assert False + + +def test__check_template(): + args_with_override = {"region_data_sets": {"dfhgcd": {"dsn": "data.set.path"}}} + args_with_template = {"region_data_sets": {"template": "data.set.template.<< data_set_name >>"}} + args_with_both = {"region_data_sets": {"template": "data.set.template.<< data_set_name >>", "dfhgcd": {"dsn": "data.set.path"}}} + + assert _check_template(args_with_override, "region_data_sets") is False + assert _check_template(args_with_template, "region_data_sets") is True + assert _check_template(args_with_both, "region_data_sets") is True + + +def test__set_top_libraries_key(): + args_without_top_libs = {"region_data_sets": {"template": "data.set.template.<< data_set_name >>"}} + _set_top_libraries_key(args_without_top_libs, "dfhrpl") + + assert "top_data_sets" in list(args_without_top_libs["dfhrpl"].keys()) + assert len(list(args_without_top_libs.keys())) == 2 + + +def test__set_top_libraries_key_with_existing_key(): + args_without_top_libs = {"region_data_sets": {"template": 
"data.set.template.<< data_set_name >>"}, "dfhrpl": {"top_data_sets": "data.set.path"}} + + assert len(list(args_without_top_libs.keys())) == 2 + + _set_top_libraries_key(args_without_top_libs, "dfhrpl") + + assert len(list(args_without_top_libs.keys())) == 2 + assert "top_data_sets" in list(args_without_top_libs["dfhrpl"].keys()) + assert args_without_top_libs["dfhrpl"]["top_data_sets"] == "data.set.path" + + +def test__set_top_libraries_key_with_existing_libraries_key_not_top_libraries_key(): + args_without_top_libs = {"region_data_sets": {"template": "data.set.template.<< data_set_name >>"}, "dfhrpl": {"data_sets": "data.set.path"}} + + assert len(list(args_without_top_libs.keys())) == 2 + _set_top_libraries_key(args_without_top_libs, "dfhrpl") + + assert len(list(args_without_top_libs.keys())) == 2 + assert "top_data_sets" in list(args_without_top_libs["dfhrpl"].keys()) + assert args_without_top_libs["dfhrpl"]["data_sets"] == "data.set.path" diff --git a/yamllint.yaml b/yamllint.yaml index 262af58a..76041137 100644 --- a/yamllint.yaml +++ b/yamllint.yaml @@ -1,4 +1,4 @@ -# (c) Copyright IBM Corp. 2020,2021 +# (c) Copyright IBM Corp. 2020,2023 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) --- # Based on ansible-lint config