Merging volume mgmt test cases
Signed-off-by: julpark <[email protected]>
julpark-rh committed Jan 16, 2025
1 parent 19d526a commit 39587b4
Showing 11 changed files with 732 additions and 843 deletions.
12 changes: 0 additions & 12 deletions suites/quincy/cephfs/tier-2_cephfs_test-volume-management.yaml
@@ -343,15 +343,3 @@ tests:
       polarion-id: CEPH-11333
       desc: File system life cycle
       abort-on-fail: false
-  - test:
-      name: volume related scenarios(delete,rename)
-      module: cephfs_vol_management.cephfs_vol_mgmt_volume_scenarios.py
-      polarion-id: CEPH-83603354
-      desc: volume related scenarios(delete,rename)
-      abort-on-fail: false
-  - test:
-      name: cephfs subvolumegroup scenarios
-      module: cephfs_vol_management.cephfs_vol_mgmt_subvolgroup_scenarios.py
-      polarion-id: CEPH-83604079
-      desc: cephfs subvolumegroup scenarios
-      abort-on-fail: false
6 changes: 6 additions & 0 deletions suites/quincy/cephfs/tier-2_file-dir-lay_vol-mgmt_nfs.yaml
@@ -482,3 +482,9 @@ tests:
       polarion-id: CEPH-83604097
       desc: Basic info validation after volume creation and deletion
       abort-on-fail: true
+  - test:
+      name: volume rename, subvolume earmark, subvolumegroup idempotence scenarios
+      module: cephfs_vol_management.cephfs_vol_mgmt_rename_earmark_subvolume.py
+      polarion-id: CEPH-83604978
+      desc: volume rename, subvolume earmark, subvolumegroup idempotence scenarios
+      abort-on-fail: false
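
Each module: entry above names a script that the cephci harness imports and drives through a run(ceph_cluster, **kw) entry point, with 0 as the pass return code. As a rough sketch of the shape the newly wired cephfs_vol_mgmt_rename_earmark_subvolume.py has to take (a hypothetical skeleton under that convention, not the committed module):

    import traceback

    from tests.cephfs.cephfs_utilsV1 import FsUtils
    from utility.log import Log

    log = Log(__name__)


    def run(ceph_cluster, **kw):
        try:
            fs_util = FsUtils(ceph_cluster)
            client = ceph_cluster.get_ceph_objects("client")[0]
            # ... rename, earmark and idempotence scenarios go here ...
            return 0
        except Exception as e:
            log.error(e)
            log.error(traceback.format_exc())
            return 1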
12 changes: 0 additions & 12 deletions suites/reef/cephfs/tier-2_cephfs_test-volume-management.yaml
@@ -340,15 +340,3 @@ tests:
       polarion-id: CEPH-11333
       desc: File system life cycle
       abort-on-fail: false
-  - test:
-      name: volume related scenarios(delete,rename)
-      module: cephfs_vol_management.cephfs_vol_mgmt_volume_scenarios.py
-      polarion-id: CEPH-83603354
-      desc: volume related scenarios(delete,rename)
-      abort-on-fail: false
-  - test:
-      name: cephfs subvolumegroup scenarios
-      module: cephfs_vol_management.cephfs_vol_mgmt_subvolgroup_scenarios.py
-      polarion-id: CEPH-83604079
-      desc: cephfs subvolumegroup scenarios
-      abort-on-fail: false
6 changes: 6 additions & 0 deletions suites/reef/cephfs/tier-2_file-dir-lay_vol-mgmt_nfs.yaml
@@ -482,3 +482,9 @@ tests:
       polarion-id: CEPH-83604097
       desc: Basic info validation after volume creation and deletion
       abort-on-fail: true
+  - test:
+      name: volume rename, subvolume earmark, subvolumegroup idempotence scenarios
+      module: cephfs_vol_management.cephfs_vol_mgmt_rename_earmark_subvolume.py
+      polarion-id: CEPH-83604978
+      desc: volume rename, subvolume earmark, subvolumegroup idempotence scenarios
+      abort-on-fail: false
18 changes: 0 additions & 18 deletions suites/squid/cephfs/tier-2_cephfs_test-volume-management_arch.yaml
@@ -340,21 +340,3 @@ tests:
       polarion-id: CEPH-11333
       desc: File system life cycle
       abort-on-fail: false
-  - test:
-      name: volume related scenarios(delete,rename)
-      module: cephfs_vol_management.cephfs_vol_mgmt_volume_scenarios.py
-      polarion-id: CEPH-83603354
-      desc: volume related scenarios(delete,rename)
-      abort-on-fail: false
-  - test:
-      name: cephfs subvolumegroup scenarios
-      module: cephfs_vol_management.cephfs_vol_mgmt_subvolgroup_scenarios.py
-      polarion-id: CEPH-83604079
-      desc: cephfs subvolumegroup scenarios
-      abort-on-fail: false
-  - test:
-      name: cephfs subvolume idempoence earmark
-      module: cephfs_vol_management.cephfs_vol_mgmt_subvol_idempotence_earmark.py
-      polarion-id: CEPH-83604184
-      desc: cephfs subvolume idempoence earmark
-      abort-on-fail: false
12 changes: 6 additions & 6 deletions suites/squid/cephfs/tier-2_file-dir-lay_vol-mgmt_nfs.yaml
@@ -476,15 +476,15 @@ tests:
       module: cephfs_nfs.nfs_multiple_export_using_single_conf.py
       name: "nfs_multiple_export_using_single_conf"
       polarion-id: "CEPH-83575082"
-  - test:
-      name: cephfs subvolume idempoence earmark
-      module: cephfs_vol_management.cephfs_vol_mgmt_subvol_idempotence_earmark.py
-      polarion-id: CEPH-83604184
-      desc: cephfs subvolume idempoence earmark
-      abort-on-fail: false
   - test:
       name: Basic info validation after volume creation and deletion
       module: cephfs_vol_management.cephfs_vol_mgmt_test_volume.py
       polarion-id: CEPH-83604097
       desc: Basic info validation after volume creation and deletion
       abort-on-fail: true
+  - test:
+      name: volume rename, subvolume earmark, subvolumegroup idempotence scenarios
+      module: cephfs_vol_management.cephfs_vol_mgmt_rename_earmark_subvolume.py
+      polarion-id: CEPH-83604978
+      desc: volume rename, subvolume earmark, subvolumegroup idempotence scenarios
+      abort-on-fail: false
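
For reference, the subvolume earmark scenarios folded into the renamed module revolve around the ceph fs subvolume earmark set/get/rm subcommands that appeared around the Squid release. A rough round-trip in the same exec_command style the utility code uses (the sv1 subvolume name and the smb earmark value are illustrative assumptions, not taken from this diff):

    # Hypothetical earmark round-trip; 'sv1' and the 'smb' scope are assumptions.
    for cmd in [
        "ceph fs subvolume create cephfs sv1",
        "ceph fs subvolume earmark set cephfs sv1 --earmark smb",  # tag the subvolume
        "ceph fs subvolume earmark get cephfs sv1",  # expected to echo back 'smb'
        "ceph fs subvolume earmark rm cephfs sv1",  # remove the earmark again
    ]:
        out, ec = client.exec_command(sudo=True, cmd=cmd)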
37 changes: 37 additions & 0 deletions tests/cephfs/cephfs_utilsV1.py
@@ -5670,3 +5670,40 @@ def validate_dicts(self, dicts, keys_to_check):
                 log.error(f"Key '{key}' mismatch: Values = {values}")
                 return False
         return True
+
+    def rename_volume(self, client, old_name, new_name):
+        # A volume has to be failed and refusing new client sessions
+        # before `ceph fs rename` will accept it.
+        log.info(f"[Fail {old_name} before renaming it]")
+        client.exec_command(
+            sudo=True, cmd=f"ceph fs fail {old_name} --yes-i-really-mean-it"
+        )
+        log.info("[Set refuse_client_session to true]")
+        client.exec_command(
+            sudo=True, cmd=f"ceph fs set {old_name} refuse_client_session true"
+        )
+        log.info("[Rename the volume]")
+        rename_cmd = f"ceph fs rename {old_name} {new_name} --yes-i-really-mean-it"
+        out, ec = client.exec_command(sudo=True, cmd=rename_cmd)
+        # The CLI prints its confirmation message on stderr.
+        if "renamed." not in ec:
+            log.error(ec)
+            log.error(f"Failed to rename the volume: {out}")
+            return 1
+        out, ec = client.exec_command(sudo=True, cmd="ceph fs ls")
+        if new_name not in out:
+            log.error(f"Volume not renamed: {out}")
+            return 1
+        log.info(f"Volume renamed successfully: {out}")
+        log.info("Put it back to previous state")
+        client.exec_command(
+            sudo=True, cmd=f"ceph fs set {new_name} refuse_client_session false"
+        )
+        client.exec_command(sudo=True, cmd=f"ceph fs set {new_name} joinable true")
+        # Poll for up to ~50 seconds for the MDS to come back active.
+        timer = 10
+        while timer > 0:
+            out, ec = client.exec_command(sudo=True, cmd=f"ceph fs status {new_name}")
+            if "active" in out:
+                log.info(f"Volume {new_name} is active now")
+                break
+            time.sleep(5)
+            timer -= 1
+        else:
+            log.warning(f"Volume {new_name} did not turn active within the timeout")
+        log.info("Renaming and verification of volume successful")
+        return 0
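
A plausible call site for the new helper, assuming an FsUtils instance and a client node obtained the way the other cephfs tests do (the volume names here are placeholders):

    # Hypothetical usage: rename the volume, then restore the original name.
    fs_util = FsUtils(ceph_cluster)
    client = ceph_cluster.get_ceph_objects("client")[0]
    if fs_util.rename_volume(client, "cephfs", "cephfs_new"):
        raise AssertionError("volume rename scenario failed")
    fs_util.rename_volume(client, "cephfs_new", "cephfs")

Since rename_volume returns 1 on failure and 0 on success, callers can treat its return value as a shell-style exit code.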