From 6dd30fa4196f93ed84c2689c25701838b372c69a Mon Sep 17 00:00:00 2001 From: Rajan Mishra Date: Thu, 4 Jan 2024 10:29:16 +0100 Subject: [PATCH 1/8] Fix for remote mount Signed-off-by: Rajan Mishra --- roles/remotemount_configure/tasks/remotecluster.yml | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/roles/remotemount_configure/tasks/remotecluster.yml b/roles/remotemount_configure/tasks/remotecluster.yml index 4da9dba6..5770f24f 100644 --- a/roles/remotemount_configure/tasks/remotecluster.yml +++ b/roles/remotemount_configure/tasks/remotecluster.yml @@ -285,9 +285,12 @@ run_once: True - set_fact: - owning_nodes_name: "{{ owning_nodes_name }} + [ '{{ item.adminNodeName }}' ]" + owning_nodes_name: "{{ owning_nodes_name + [item.adminNodeName] }}" with_items: "{{ owning_cluster_nodes.json.nodes }}" run_once: True + + - debug: + msg: "{{owning_nodes_name}}" # # This Section is when using daemonNodeName @@ -312,7 +315,7 @@ run_once: True - set_fact: - owning_daemon_nodes_name: "{{ owning_daemon_nodes_name }} + [ '{{ item.json.nodes.0.network.daemonNodeName }}' ]" + owning_daemon_nodes_name: "{{ owning_daemon_nodes_name + [item.json.nodes.0.network.daemonNodeName] }}" with_items: "{{ owning_cluster_daemonnodes.results }}" run_once: True From b93247f61dfccfac167ffdf0f398aa28bbe91183 Mon Sep 17 00:00:00 2001 From: Rajan Mishra Date: Sun, 28 Apr 2024 15:25:14 +0200 Subject: [PATCH 2/8] Fixed sync issue Signed-off-by: Rajan Mishra --- roles/remotemount_configure/tasks/remotecluster.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/roles/remotemount_configure/tasks/remotecluster.yml b/roles/remotemount_configure/tasks/remotecluster.yml index 5770f24f..99e3e03e 100644 --- a/roles/remotemount_configure/tasks/remotecluster.yml +++ b/roles/remotemount_configure/tasks/remotecluster.yml @@ -288,9 +288,6 @@ owning_nodes_name: "{{ owning_nodes_name + [item.adminNodeName] }}" with_items: "{{ owning_cluster_nodes.json.nodes }}" run_once: True - - - debug: - msg: "{{owning_nodes_name}}" # # This Section is when using daemonNodeName From 0dbe217bf170659484a85fb838eb6f10ad305f1f Mon Sep 17 00:00:00 2001 From: Rajan Mishra Date: Wed, 28 Aug 2024 19:45:27 +0200 Subject: [PATCH 3/8] CES S3 upgrade support role Signed-off-by: Rajan Mishra --- roles/s3_upgrade/README.md | 1 + roles/s3_upgrade/defaults/main.yml | 20 +++ roles/s3_upgrade/handlers/main.yml | 4 + roles/s3_upgrade/meta/main.yml | 20 +++ roles/s3_upgrade/tasks/apt/install.yml | 15 ++ roles/s3_upgrade/tasks/install.yml | 69 +++++++++ roles/s3_upgrade/tasks/install_dir_pkg.yml | 77 ++++++++++ roles/s3_upgrade/tasks/install_local_pkg.yml | 137 ++++++++++++++++++ roles/s3_upgrade/tasks/install_remote_pkg.yml | 109 ++++++++++++++ roles/s3_upgrade/tasks/install_repository.yml | 31 ++++ roles/s3_upgrade/tasks/main.yml | 4 + roles/s3_upgrade/tasks/yum/install.yml | 6 + roles/s3_upgrade/tasks/zypper/install.yml | 6 + roles/s3_upgrade/vars/main.yml | 10 ++ 14 files changed, 509 insertions(+) create mode 120000 roles/s3_upgrade/README.md create mode 100644 roles/s3_upgrade/defaults/main.yml create mode 100644 roles/s3_upgrade/handlers/main.yml create mode 100644 roles/s3_upgrade/meta/main.yml create mode 100644 roles/s3_upgrade/tasks/apt/install.yml create mode 100644 roles/s3_upgrade/tasks/install.yml create mode 100644 roles/s3_upgrade/tasks/install_dir_pkg.yml create mode 100644 roles/s3_upgrade/tasks/install_local_pkg.yml create mode 100644 roles/s3_upgrade/tasks/install_remote_pkg.yml create mode 100644 
roles/s3_upgrade/tasks/install_repository.yml create mode 100644 roles/s3_upgrade/tasks/main.yml create mode 100644 roles/s3_upgrade/tasks/yum/install.yml create mode 100644 roles/s3_upgrade/tasks/zypper/install.yml create mode 100644 roles/s3_upgrade/vars/main.yml diff --git a/roles/s3_upgrade/README.md b/roles/s3_upgrade/README.md new file mode 120000 index 00000000..6a3df305 --- /dev/null +++ b/roles/s3_upgrade/README.md @@ -0,0 +1 @@ +../../docs/README.NFS.md \ No newline at end of file diff --git a/roles/s3_upgrade/defaults/main.yml b/roles/s3_upgrade/defaults/main.yml new file mode 100644 index 00000000..a694d569 --- /dev/null +++ b/roles/s3_upgrade/defaults/main.yml @@ -0,0 +1,20 @@ +--- +# Default variables for the IBM Spectrum Scale (S3) role - +# either edit this file or define your own variables to override the defaults + +## Specify the URL of the (existing) Spectrum Scale YUM/apt/zypper repository +#scale_install_s3_repository_rpms: http:///s3_rpms/ +#scale_install_s3_repository_debs: http:///s3_debs/ +#scale_install_s3_repository_rpms_sles: http:///s3_rpms/sles12/ + +## List of S3 packages to install +scale_s3_packages: +- noobaa-core +- gpfs.mms3 + +## Temporary directory to copy installation package to +## (local package installation method) +scale_install_localpkg_tmpdir_path: /tmp + +## Flag to install s3 debug package +scale_s3_install_debuginfo: true diff --git a/roles/s3_upgrade/handlers/main.yml b/roles/s3_upgrade/handlers/main.yml new file mode 100644 index 00000000..2e896124 --- /dev/null +++ b/roles/s3_upgrade/handlers/main.yml @@ -0,0 +1,4 @@ +--- +# handlers file for node +- name: yum-clean-metadata + command: yum clean metadata diff --git a/roles/s3_upgrade/meta/main.yml b/roles/s3_upgrade/meta/main.yml new file mode 100644 index 00000000..d32d632b --- /dev/null +++ b/roles/s3_upgrade/meta/main.yml @@ -0,0 +1,20 @@ +--- +galaxy_info: + author: IBM Corporation + description: Highly-customizable Ansible role for installing and configuring IBM Spectrum Scale (GPFS) + company: IBM + + license: Apache-2.0 + + min_ansible_version: 2.9 + + platforms: + - name: EL + versions: + - 7 + - 8 + + galaxy_tags: [] + +dependencies: + - ibm.spectrum_scale.core_common diff --git a/roles/s3_upgrade/tasks/apt/install.yml b/roles/s3_upgrade/tasks/apt/install.yml new file mode 100644 index 00000000..75fd2f00 --- /dev/null +++ b/roles/s3_upgrade/tasks/apt/install.yml @@ -0,0 +1,15 @@ +--- +- name: upgrade | Upgrade s3 packages + package: + name: "{{ scale_install_all_packages }}" + state: latest + when: scale_install_repository_url is defined + + +- name: upgrade | Upgrade GPFS S3 deb + apt: + deb: "{{ item }}" + state: latest + when: scale_install_repository_url is not defined + with_items: + - "{{ scale_install_all_packages }}" diff --git a/roles/s3_upgrade/tasks/install.yml b/roles/s3_upgrade/tasks/install.yml new file mode 100644 index 00000000..88264e77 --- /dev/null +++ b/roles/s3_upgrade/tasks/install.yml @@ -0,0 +1,69 @@ +--- +# Install or update RPMs +# Ensure that installation method was chosen during previous role +- block: + - name: upgrade | Check for repository installation method + set_fact: + scale_installmethod: repository + when: + - scale_install_repository_url is defined + + - name: upgrade | Check for localpkg installation method + set_fact: + scale_installmethod: local_pkg + when: + - scale_install_repository_url is undefined + - scale_install_remotepkg_path is undefined + - scale_install_localpkg_path is defined + + - name: upgrade | Check for remotepkg 
installation method + set_fact: + scale_installmethod: remote_pkg + when: + - scale_install_repository_url is undefined + - scale_install_remotepkg_path is defined + + - name: upgrade | Check for directory package installation method + set_fact: + scale_installmethod: dir_pkg + when: + - scale_install_repository_url is undefined + - scale_install_remotepkg_path is undefined + - scale_install_localpkg_path is undefined + - scale_install_directory_pkg_path is defined + + - name: upgrade | Check installation method + assert: + that: scale_installmethod is defined + msg: >- + Please set the appropriate variable 'scale_install_*' for your desired + installation method! + run_once: true + delegate_to: localhost + +# Run chosen installation method to get list of RPMs + +- name: upgrade | Initialize list of packages + set_fact: + scale_install_all_packages: [] + +- name: upgrade | Set the extracted package directory path + set_fact: + s3_extracted_path: "{{ scale_extracted_path }}" + +- name: upgrade | Stat extracted packages directory + stat: + path: "{{ s3_extracted_path }}" + register: scale_extracted_gpfs_dir + +- include_tasks: install_{{ scale_installmethod }}.yml + +- import_tasks: apt/install.yml + when: ansible_distribution in scale_ubuntu_distribution + +- import_tasks: yum/install.yml + when: ansible_distribution in scale_rhel_distribution + +- import_tasks: zypper/install.yml + when: ansible_distribution in scale_sles_distribution + diff --git a/roles/s3_upgrade/tasks/install_dir_pkg.yml b/roles/s3_upgrade/tasks/install_dir_pkg.yml new file mode 100644 index 00000000..0dc1730a --- /dev/null +++ b/roles/s3_upgrade/tasks/install_dir_pkg.yml @@ -0,0 +1,77 @@ +--- +# Dir package installation method + +- block: ## run_once: true + - name: install | Stat directory installation package + stat: + path: "{{ scale_install_directory_pkg_path }}" + register: scale_install_dirpkg + + - name: install | Check directory installation package + assert: + that: scale_install_dirpkg.stat.exists + msg: >- + Please set the variable 'scale_install_directory_pkg_path' to point to the + local installation package (accessible on Ansible control machine)! 
+ run_once: true + delegate_to: localhost + +- name: install| Creates default directory + file: + path: "{{ scale_extracted_path }}" + state: directory + mode: a+x + recurse: yes + +- name: install | Stat extracted packages + stat: + path: "{{ scale_extracted_path + '/' + scale_install_directory_pkg_path | basename }}" + register: scale_install_gpfs_packagedir + +# +# Copy installation directory package to default +# +- block: + - name: install | Copy installation package to node + copy: + src: "{{ scale_install_directory_pkg_path }}" + dest: "{{ scale_extracted_path }}" + mode: a+x + +- name: install | Set installation package path + set_fact: + dir_path: "{{ scale_extracted_path + '/' + scale_install_directory_pkg_path | basename }}" + +- name: install | gpfs base path + set_fact: + gpfs_path_url: "{{ dir_path }}" + when: scale_install_directory_pkg_path is defined + +# +# Find noobaa-core +# +# + +- block: ## when: host is defined as a protocol node + + - name: install | Find noobaa-core (noobaa-core) package + find: + paths: "{{ gpfs_path_url }}" + patterns: noobaa-core* + register: scale_install_gpfs_s3 + + - name: install | Check valid GPFS (s3) package + assert: + that: scale_install_gpfs_s3.matched > 0 + msg: "No S3 (noobaa-core) package found {{ gpfs_path_url }}noobaa-core*" + + - name: install | Add GPFS s3 package to list + vars: + current_package: "{{ item.path }}" + set_fact: + scale_install_all_packages: "{{ scale_install_all_packages + [ current_package ] }}" + with_items: + - "{{ scale_install_gpfs_s3.files }}" + +- debug: + msg: "{{ scale_install_all_packages }}" diff --git a/roles/s3_upgrade/tasks/install_local_pkg.yml b/roles/s3_upgrade/tasks/install_local_pkg.yml new file mode 100644 index 00000000..27606923 --- /dev/null +++ b/roles/s3_upgrade/tasks/install_local_pkg.yml @@ -0,0 +1,137 @@ +--- +# Local package installation method +- block: ## run_once: true + - name: install | Stat local installation package + stat: + path: "{{ scale_install_localpkg_path }}" + checksum_algorithm: md5 + register: scale_install_localpkg + + - name: install | Check local installation package + assert: + that: scale_install_localpkg.stat.exists + msg: >- + Please set the variable 'scale_install_localpkg_path' to point to the + local installation package (accessible on Ansible control machine)! + +# +# Optionally, verify package checksum +# + - name: install | Stat checksum file + stat: + path: "{{ scale_install_localpkg_path }}.md5" + register: scale_install_md5_file + + - block: ## when: scale_install_md5_file.stat.exists + - name: install | Read checksum from file + set_fact: + scale_install_md5_sum: "{{ lookup('file', scale_install_localpkg_path + '.md5') }}" + + - name: install | Compare checksums + assert: + that: scale_install_md5_sum.strip().split().0 == scale_install_localpkg.stat.checksum + msg: >- + Checksums don't match. Please check integritiy of your local + installation package! 
+ + when: scale_install_md5_file.stat.exists + run_once: true + delegate_to: localhost + +# +# Copy installation package +# +- name: install | Stat extracted packages + stat: + path: "{{ s3_extracted_path }}" + register: scale_install_gpfs_rpmdir + +- block: ## when: not scale_install_gpfs_rpmdir.stat.exists + - name: install | Stat temporary directory + stat: + path: "{{ scale_install_localpkg_tmpdir_path }}" + register: scale_install_localpkg_tmpdir + + - name: install | Check temporary directory + assert: + that: + - scale_install_localpkg_tmpdir.stat.exists + - scale_install_localpkg_tmpdir.stat.isdir + msg: >- + Please set the variable 'scale_install_localpkg_tmpdir_path' to point + to a temporary directory on the remote system! + + - name: install | Copy installation package to node + copy: + src: "{{ scale_install_localpkg_path }}" + dest: "{{ scale_install_localpkg_tmpdir_path }}" + mode: a+x + when: not scale_install_gpfs_rpmdir.stat.exists + +# +# Extract installation package +# +- name: install | Extract installation package + vars: + localpkg: "{{ scale_install_localpkg_tmpdir_path + '/' + scale_install_localpkg_path | basename }}" + command: "{{ localpkg + ' --silent' }}" + args: + creates: "{{ s3_extracted_path }}" + +- name: install | Stat extracted packages + stat: + path: "{{ s3_extracted_path }}" + register: scale_install_gpfs_rpmdir + +- name: install | Check extracted packages + assert: + that: + - scale_install_gpfs_rpmdir.stat.exists + - scale_install_gpfs_rpmdir.stat.isdir + msg: >- + The variable 'scale_version' doesn't seem to match the contents of the + local installation package! + +# Delete installation package +- name: install | Delete installation package from node + file: + path: "{{ scale_install_localpkg_tmpdir_path + '/' + scale_install_localpkg_path | basename }}" + state: absent + +- name: install | s3 path + set_fact: + scale_s3_url: 's3_rpms/rhel8/' + when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '8' + +- name: install | s3 path + set_fact: + scale_s3_url: 's3_rpms/rhel9/' + when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '9' + +# Find s3 rpms +- block: ## when: ansible_distribution in scale_rhel_distribution or ansible_distribution in scale_sles_distribution + + - name: install | Find noobaa-core (noobaa-core) package + find: + paths: "{{ s3_extracted_path }}/{{ scale_s3_url }}" + patterns: noobaa-core* + register: scale_install_gpfs_s3 + + - name: install | Check valid (noobaa-core) package + assert: + that: scale_install_gpfs_s3.matched > 0 + msg: "No noobaa-core (noobaa-core) package found {{ s3_extracted_path }}/{{ scale_s3_url }}noobaa-core*" + + - name: install | Add noobaa-core package to list + vars: + current_package: "{{ item.path }}" + set_fact: + scale_install_all_packages: "{{ scale_install_all_packages + [ current_package ] }}" + with_items: + - "{{ scale_install_gpfs_s3.files }}" + + when: ansible_distribution in scale_rhel_distribution or ansible_distribution in scale_sles_distribution + + +- debug: + msg: "{{ scale_install_all_packages }}" diff --git a/roles/s3_upgrade/tasks/install_remote_pkg.yml b/roles/s3_upgrade/tasks/install_remote_pkg.yml new file mode 100644 index 00000000..56227dc2 --- /dev/null +++ b/roles/s3_upgrade/tasks/install_remote_pkg.yml @@ -0,0 +1,109 @@ +--- +# Remote package installation method + +- name: install | Stat remote installation package + stat: + path: "{{ scale_install_remotepkg_path }}" + 
checksum_algorithm: md5 + register: scale_install_remotepkg + +- name: install | Check remote installation package + assert: + that: scale_install_remotepkg.stat.exists + msg: >- + Please set the variable 'scale_install_remotepkg_path' to point to the + remote installation package (accessible on Ansible managed node)! + +# +# Optionally, verify package checksum +# +- name: install | Stat checksum file + stat: + path: "{{ scale_install_remotepkg_path }}.md5" + register: scale_install_md5_file + +- block: ## when: scale_install_md5_file.stat.exists + - name: install | Read checksum from file + slurp: + src: "{{ scale_install_remotepkg_path }}.md5" + register: scale_install_md5_sum + + - name: install | Compare checksums + vars: + md5sum: "{{ scale_install_md5_sum.content | b64decode }}" + assert: + that: md5sum.strip().split().0 == scale_install_remotepkg.stat.checksum + msg: >- + Checksums don't match. Please check integritiy of your remote + installation package! + when: scale_install_md5_file.stat.exists + +# +# Extract installation package +# +- name: install | Stat extracted packages + stat: + path: "{{ s3_extracted_path }}" + register: scale_install_gpfs_rpmdir + +- name: install | Make installation package executable + file: + path: "{{ scale_install_remotepkg_path }}" + mode: a+x + when: not scale_install_gpfs_rpmdir.stat.exists + +- name: install | Extract installation package + command: "{{ scale_install_remotepkg_path + ' --silent' }}" + args: + creates: "{{ s3_extracted_path }}" + +- name: install | Stat extracted packages + stat: + path: "{{ s3_extracted_path }}" + register: scale_install_gpfs_rpmdir + +- name: install | Check extracted packages + assert: + that: + - scale_install_gpfs_rpmdir.stat.exists + - scale_install_gpfs_rpmdir.stat.isdir + msg: >- + The variable 'scale_version' doesn't seem to match the contents of the + remote installation package! 
+ +- name: install | s3 path + set_fact: + scale_s3_url: 's3_rpms/rhel8/' + when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '8' + +- name: install | s3 path + set_fact: + scale_s3_url: 's3_rpms/rhel9/' + when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '9' + +# Find s3 rpms +- block: ## when: ansible_distribution in scale_rhel_distribution or ansible_distribution in scale_sles_distribution + + - name: install | Find noobaa-core (noobaa-core) package + find: + paths: "{{ s3_extracted_path }}/{{ scale_s3_url }}" + patterns: noobaa-core* + register: scale_install_gpfs_s3 + + - name: install | Check valid noobaa-core (noobaa-core) package + assert: + that: scale_install_gpfs_s3.matched > 0 + msg: "No S3 (noobaa-core) package found {{ s3_extracted_path }}/{{ scale_s3_url }}gpfs.s3*" + + - name: install | Add GPFS s3 package to list + vars: + current_package: "{{ item.path }}" + set_fact: + scale_install_all_packages: "{{ scale_install_all_packages + [ current_package ] }}" + with_items: + - "{{ scale_install_gpfs_s3.files }}" + + when: ansible_distribution in scale_rhel_distribution or ansible_distribution in scale_sles_distribution + +- debug: + msg: "{{ scale_install_all_packages }}" diff --git a/roles/s3_upgrade/tasks/install_repository.yml b/roles/s3_upgrade/tasks/install_repository.yml new file mode 100644 index 00000000..201d7e69 --- /dev/null +++ b/roles/s3_upgrade/tasks/install_repository.yml @@ -0,0 +1,31 @@ +--- +- name: upgrade | s3 path + set_fact: + scale_s3_url: 's3_rpms/rhel8/' + when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '8' + +- name: upgrade | s3 path + set_fact: + scale_s3_url: 's3_rpms/rhel9/' + when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '9' + +- name: upgrade | Configure s3 YUM repository + yum_repository: + name: spectrum-scale-s3 + description: IBM Spectrum Scale (s3) + baseurl: "{{ scale_install_repository_url }}{{ scale_s3_url }}" + gpgcheck: "{{ scale_install_gpgcheck }}" + repo_gpgcheck: no + sslverify: no + state: present + notify: yum-clean-metadata + when: + - ansible_pkg_mgr == 'yum' or ansible_pkg_mgr == 'dnf' + - scale_install_repository_url is defined + - scale_install_repository_url != 'existing' + +- name: upgrade | Add GPFS s3 packages to list + set_fact: + scale_install_all_packages: "{{ scale_install_all_packages + [ item ] }}" + with_items: + - "{{ scale_s3_packages }}" diff --git a/roles/s3_upgrade/tasks/main.yml b/roles/s3_upgrade/tasks/main.yml new file mode 100644 index 00000000..fc33687b --- /dev/null +++ b/roles/s3_upgrade/tasks/main.yml @@ -0,0 +1,4 @@ +--- +# Install IBM Spectrum Scale (S3) +- import_tasks: install.yml + tags: upgrade diff --git a/roles/s3_upgrade/tasks/yum/install.yml b/roles/s3_upgrade/tasks/yum/install.yml new file mode 100644 index 00000000..9ddbc12e --- /dev/null +++ b/roles/s3_upgrade/tasks/yum/install.yml @@ -0,0 +1,6 @@ +--- +- name: upgrade | Upgrade GPFS S3 packages + yum: + name: "{{ scale_install_all_packages }}" + state: latest + disable_gpg_check: "{{ scale_disable_gpgcheck }}" diff --git a/roles/s3_upgrade/tasks/zypper/install.yml b/roles/s3_upgrade/tasks/zypper/install.yml new file mode 100644 index 00000000..2ea66d79 --- /dev/null +++ b/roles/s3_upgrade/tasks/zypper/install.yml @@ -0,0 +1,6 @@ +--- +- name: upgrade | Upgrade GPFS S3 packages + zypper: + name: "{{ scale_install_all_packages }}" + state: latest + 
disable_gpg_check: no diff --git a/roles/s3_upgrade/vars/main.yml b/roles/s3_upgrade/vars/main.yml new file mode 100644 index 00000000..5a6e9c01 --- /dev/null +++ b/roles/s3_upgrade/vars/main.yml @@ -0,0 +1,10 @@ +--- +# Variables for the IBM Spectrum Scale (GPFS) role - +# these variables are *not* meant to be overridden + +## Compute RPM version from Spectrum Scale version +scale_rpmversion: "{{ scale_version | regex_replace('^([0-9.]+)\\.([0-9])$', '\\1-\\2') }}" + +## Default scale extraction path +scale_extracted_default_path: "/usr/lpp/mmfs" +scale_extracted_path: "{{ scale_extracted_default_path }}/{{ scale_version }}" From abbe501fcdc36e10a507fd07a6fe20c0212adfab Mon Sep 17 00:00:00 2001 From: sujeet Date: Wed, 8 Jan 2025 18:28:01 +0100 Subject: [PATCH 4/8] Fixed callhome defect fixes Signed-off-by: sujeet --- roles/callhome_configure/tasks/configure.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/roles/callhome_configure/tasks/configure.yml b/roles/callhome_configure/tasks/configure.yml index 14274003..c87df6f1 100755 --- a/roles/callhome_configure/tasks/configure.yml +++ b/roles/callhome_configure/tasks/configure.yml @@ -54,7 +54,7 @@ - name: configure| Setup the call home customer configuration shell: - cmd: "{{ scale_command_path }}mmcallhome info change --customer-name {{ scale_callhome_params.customer_name }} --customer-id {{ scale_callhome_params.customer_id }} --email {{ scale_callhome_params.customer_email}} --country-code {{ scale_callhome_params.customer_country }}" + cmd: "{{ scale_command_path }}mmcallhome info change --customer-name \"{{ scale_callhome_params.customer_name }}\" --customer-id {{ scale_callhome_params.customer_id }} --email {{ scale_callhome_params.customer_email}} --country-code {{ scale_callhome_params.customer_country }}" register: scale_callhome_customer_config - debug: From 7b8484d48948e88707fe61ef9f389b484a8427db Mon Sep 17 00:00:00 2001 From: sujeet Date: Mon, 20 Jan 2025 06:56:12 +0100 Subject: [PATCH 5/8] README file update for CES S3 and supported OS Signed-off-by: sujeet --- README.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/README.md b/README.md index b37d74d8..7f157e72 100644 --- a/README.md +++ b/README.md @@ -35,7 +35,9 @@ Features - [x] Support for RHEL 7 on x86_64, PPC64 and PPC64LE - [x] Support for RHEL 8 on x86_64 and PPC64LE +- [x] Support for RHEL 9 on x86_64 and PPC64LE - [x] Support for UBUNTU 20 on x86_64 and PPC64LE +- [x] Support for UBUNTU 22 on x86_64 and PPC64LE - [x] Support for SLES 15 on x86_64 and PPC64LE #### Common prerequisites @@ -91,6 +93,7 @@ Features - [x] Install IBM Storage Scale SMB or NFS on selected cluster nodes (5.0.5.2 and above) - [x] Install IBM Storage Scale Object on selected cluster nodes (5.1.1.0 and above) +- [x] Install IBM Storage Scale S3 on selected cluster nodes (5.2.0.0 and above) - [x] CES IPV4 or IPV6 support - [x] CES interface mode support @@ -107,12 +110,14 @@ The following IBM Storage Scale versions are tested: - 5.0.4.0 and above - 5.0.5.2 and above for CES (SMB and NFS) - 5.1.1.0 and above for CES (Object) +- 5.2.0.0 and above for CES (S3) - **Refer to the [Release Notes](https://github.com/IBM/ibm-spectrum-scale-install-infra/releases) for details** Specific OS requirements: - For CES (SMB/NFS) on SLES15: Python 3 is required. - For CES (Object): RhedHat 8.x is required. +- For CES (S3): RhedHat 8.x or RhedHat 9.x is required. 
Prerequisites ------------- @@ -308,6 +313,7 @@ The following [roles](https://docs.ansible.com/ansible/latest/user_guide/playboo - HDFS (`roles/hdfs_*`) - Call Home (`roles/callhome_*`) - File Audit Logging (`roles/fal_*`) +- S3 (`roles/s3_*`) - ... Note that [Core GPFS](roles/core) is the only mandatory role, all other roles are optional. Each of the optional roles requires additional configuration variables. Browse the examples in the [samples/](samples/) directory to learn how to: From bffd367d25b895bd9c1a21e010ae802e9a91256a Mon Sep 17 00:00:00 2001 From: Rajan Mishra Date: Thu, 31 Jul 2025 15:08:07 +0200 Subject: [PATCH 6/8] Fixed SLES15 issue fix Signed-off-by: Rajan Mishra --- roles/fal_install/tasks/install_repository.yml | 1 + roles/gui_install/tasks/install_repository.yml | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/roles/fal_install/tasks/install_repository.yml b/roles/fal_install/tasks/install_repository.yml index f98af3fd..5d691864 100644 --- a/roles/fal_install/tasks/install_repository.yml +++ b/roles/fal_install/tasks/install_repository.yml @@ -86,6 +86,7 @@ repo: "{{ scale_install_repository_url }}{{ scale_fal_url }}" disable_gpg_check: no state: present + overwrite_multiple: yes when: - ansible_pkg_mgr == 'zypper' - scale_install_repository_url is defined diff --git a/roles/gui_install/tasks/install_repository.yml b/roles/gui_install/tasks/install_repository.yml index 70396d60..446e1cd5 100644 --- a/roles/gui_install/tasks/install_repository.yml +++ b/roles/gui_install/tasks/install_repository.yml @@ -37,7 +37,7 @@ zypper_repository: name: spectrum-scale-gui description: IBM Spectrum Scale (GUI) - repo: "{{ scale_install_repository_url }}/gpfs_rpms/" + repo: "{{ scale_install_repository_url }}gpfs_rpms/" disable_gpg_check: no state: present overwrite_multiple: yes From 064ce8fbdfacfd7b83c6ee0b597aff9b3284d822 Mon Sep 17 00:00:00 2001 From: Rajan Mishra Date: Fri, 1 Aug 2025 08:42:31 +0200 Subject: [PATCH 7/8] Fixed SLES15 issue Signed-off-by: Rajan Mishra --- roles/gui_upgrade/tasks/install_repository.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/roles/gui_upgrade/tasks/install_repository.yml b/roles/gui_upgrade/tasks/install_repository.yml index d548e2d3..3ae49a57 100644 --- a/roles/gui_upgrade/tasks/install_repository.yml +++ b/roles/gui_upgrade/tasks/install_repository.yml @@ -44,7 +44,7 @@ zypper_repository: name: spectrum-scale-gui description: IBM Spectrum Scale (GUI) - repo: "{{ scale_install_repository_url }}/gpfs_rpms/" + repo: "{{ scale_install_repository_url }}gpfs_rpms/" disable_gpg_check: no state: present overwrite_multiple: yes From 5d2c0c2a2ddf6d8eab9007e66e0c2ff7154595b4 Mon Sep 17 00:00:00 2001 From: Ananya Singla Date: Thu, 11 Sep 2025 15:49:55 +0530 Subject: [PATCH 8/8] Ansible role to open required scale firweall ports Signed-off-by: Ananya Singla --- roles/scale_firewall_config/README.md | 1 + roles/scale_firewall_config/defaults/main.yml | 18 +++ .../scale_firewall_config/group_vars/all.yml | 13 ++ roles/scale_firewall_config/handlers/main.yml | 3 + roles/scale_firewall_config/meta/main.yml | 0 roles/scale_firewall_config/tasks/main.yml | 134 ++++++++++++++++++ .../scale_firewall_config/tests/inventory.yml | 3 + .../scale_firewall_config/tests/playbook.yml | 5 + 8 files changed, 177 insertions(+) create mode 100644 roles/scale_firewall_config/README.md create mode 100755 roles/scale_firewall_config/defaults/main.yml create mode 100644 roles/scale_firewall_config/group_vars/all.yml create mode 100755 
roles/scale_firewall_config/handlers/main.yml create mode 100755 roles/scale_firewall_config/meta/main.yml create mode 100755 roles/scale_firewall_config/tasks/main.yml create mode 100755 roles/scale_firewall_config/tests/inventory.yml create mode 100755 roles/scale_firewall_config/tests/playbook.yml diff --git a/roles/scale_firewall_config/README.md b/roles/scale_firewall_config/README.md new file mode 100644 index 00000000..fe840054 --- /dev/null +++ b/roles/scale_firewall_config/README.md @@ -0,0 +1 @@ +../../README.md \ No newline at end of file diff --git a/roles/scale_firewall_config/defaults/main.yml b/roles/scale_firewall_config/defaults/main.yml new file mode 100755 index 00000000..cc28f2e1 --- /dev/null +++ b/roles/scale_firewall_config/defaults/main.yml @@ -0,0 +1,18 @@ +firewall: + # - { port: 80, protocol: http } + # - { port: 443, protocol: https } + # - { port: 22, protocol: ssh } + # - { port: 20, protocol: ftp } + # - { port: 21, protocol: ftp } + # - { port: 25, protocol: smtp } + # - { port: 110, protocol: pop3 } + # - { port: 143, protocol: imap } + # - { port: 53, protocol: dns } + # - { port: 123, protocol: ntp } + # - { port: 23, protocol: telnet } + # - { port: 445, protocol: smb } + + + + + diff --git a/roles/scale_firewall_config/group_vars/all.yml b/roles/scale_firewall_config/group_vars/all.yml new file mode 100644 index 00000000..2edfcde0 --- /dev/null +++ b/roles/scale_firewall_config/group_vars/all.yml @@ -0,0 +1,13 @@ +firewall: + - { port: 80, protocol: tcp } + - { port: 22, protocol: tcp } + - { port: 443, protocol: tcp } + + +required_ports: + - { port: 80, protocol: tcp} + - { port: 443, protocol: tcp } + - { port: 22, protocol: tcp } + + + diff --git a/roles/scale_firewall_config/handlers/main.yml b/roles/scale_firewall_config/handlers/main.yml new file mode 100755 index 00000000..eaae5b50 --- /dev/null +++ b/roles/scale_firewall_config/handlers/main.yml @@ -0,0 +1,3 @@ +--- +- name: Reload firewalld + command: "firewall-cmd --reload" diff --git a/roles/scale_firewall_config/meta/main.yml b/roles/scale_firewall_config/meta/main.yml new file mode 100755 index 00000000..e69de29b diff --git a/roles/scale_firewall_config/tasks/main.yml b/roles/scale_firewall_config/tasks/main.yml new file mode 100755 index 00000000..2ea7623b --- /dev/null +++ b/roles/scale_firewall_config/tasks/main.yml @@ -0,0 +1,134 @@ +--- +# 1) install and start firewalld +- name: Debug - About to install firewalld + debug: + msg: "Executing: yum install firewalld -y" + +- name: Install firewalld (if not installed) + yum: + name: firewalld + state: present + register: install_firewalld + +- name: Debug - Firewalld install output + debug: + msg: "{{ install_firewalld.stdout }}" + when: install_firewalld.rc == 0 and install_firewalld.stdout is defined + +- name: Start and enable firewalld + service: + name: firewalld + state: started + enabled: yes + register: start_firewalld + +- name: Debug - Firewalld start output + debug: + var: start_firewalld + when: install_firewalld is changed + +# 2)querying firewalld +- name: Debug - Executing Precheck List current firewalld configuration + debug: + msg: "Executing command: firewall-cmd --list-all" + +- name: Precheck List current firewalld configuration + command: firewall-cmd --list-all + register: firewalld_config_precheck + changed_when: false # so failures here don’t show as “changed” + +- name: Debug - Show stdout if precheck succeeded + debug: + msg: "{{ firewalld_config_precheck.stdout }}" + when: firewalld_config_precheck.rc == 0 and 
firewalld_config_precheck.stdout is defined + +- name: Debug - Show stderr if precheck failed + debug: + msg: "Precheck error: {{ firewalld_config_precheck.stderr }}" + when: firewalld_config_precheck.rc != 0 and firewalld_config_precheck.stderr is defined + +# 3) extracting open ports +- name: Extract open ports from firewalld config + set_fact: + open_ports: "{{ firewalld_config_precheck.stdout | regex_findall('(\\d+)/tcp') | map('int') | list }}" + +- name: Identify missing required ports + set_fact: + missing_ports: "{{ required_ports | map(attribute='port') | difference(open_ports) }}" + +- name: Debug - Missing ports before applying changes + debug: + msg: "Missing ports: {{ missing_ports | join(', ') }}" + when: missing_ports | length > 0 + +- name: Warn if required ports are missing + debug: + msg: "WARNING: Required ports not open: {{ missing_ports | join(', ') }}. They must be added to all.yml under 'firewall:' to proceed." + when: missing_ports | length > 0 + +- name: Debug - Executing open-ports commands + debug: + msg: "Executing: firewall-cmd --permanent --add-port={{ item.port }}/{{ item.protocol | default('tcp') }}" + loop: "{{ firewall }}" + +- name: Open all ports defined in 'firewall' variable + firewalld: + port: "{{ item.port }}/{{ item.protocol | default('tcp') }}" + permanent: yes + state: enabled + loop: "{{ firewall }}" + when: firewall is defined and firewall | length > 0 + notify: Reload firewalld + +# 4) reloading and post check firewalld +- name: Debug - Executing firewalld reload + debug: + msg: "Executing command: firewall-cmd --reload" + +- name: Reload firewalld + command: firewall-cmd --reload + register: reload_firewalld + changed_when: false + +- name: Debug - Reload stdout + debug: + msg: "{{ reload_firewalld.stdout }}" + when: reload_firewalld.rc == 0 and reload_firewalld.stdout is defined + +- name: Debug - Reload stderr + debug: + msg: "Reload error: {{ reload_firewalld.stderr }}" + when: reload_firewalld.rc != 0 and reload_firewalld.stderr is defined + +- name: Debug - Executing post-check list-all + debug: + msg: "Executing command: firewall-cmd --list-all" + +- name: Post-check List updated firewalld configuration + command: firewall-cmd --list-all + register: firewalld_config_postcheck + changed_when: false + +- name: Debug - Show post-check stdout + debug: + msg: "{{ firewalld_config_postcheck.stdout }}" + +- name: Extract open ports after changes + set_fact: + open_ports_after: "{{ firewalld_config_postcheck.stdout | regex_findall('(\\d+)/tcp') | map('int') | list }}" + +- name: Identify remaining missing ports + set_fact: + missing_ports_after: "{{ required_ports | map(attribute='port') | difference(open_ports_after) }}" + +- name: Debug - Ports still missing + debug: + msg: "Ports still missing after changes: {{ missing_ports_after | join(', ') }}" + when: missing_ports_after | length > 0 + +- name: Fail if required ports are still missing + fail: + msg: "ERROR: The following required ports are STILL missing: {{ missing_ports_after | join(', ') }}. Please add them to 'all.yml' under 'firewall:' and retry!" 
+ when: missing_ports_after | length > 0 + + diff --git a/roles/scale_firewall_config/tests/inventory.yml b/roles/scale_firewall_config/tests/inventory.yml new file mode 100755 index 00000000..46fb0314 --- /dev/null +++ b/roles/scale_firewall_config/tests/inventory.yml @@ -0,0 +1,3 @@ +[cluster01] +ess-11 ansible_host=192.168.100.100 ansible_user=root +ess-12 ansible_host=192.168.100.101 ansible_user=root diff --git a/roles/scale_firewall_config/tests/playbook.yml b/roles/scale_firewall_config/tests/playbook.yml new file mode 100755 index 00000000..71689507 --- /dev/null +++ b/roles/scale_firewall_config/tests/playbook.yml @@ -0,0 +1,5 @@ +- name: Configure firewall ports on ESS cluster nodes + hosts: cluster01 + become: yes + roles: + - scale_firewall_config