Skip to content

Commit

Permalink
Merge pull request #4375 from julpark-rh/jan14
Browse files Browse the repository at this point in the history
Merging volume mgmt test cases
  • Loading branch information
mergify[bot] authored Jan 27, 2025
2 parents d918097 + 8ddcbfb commit 8ad0c2d
Show file tree
Hide file tree
Showing 11 changed files with 732 additions and 855 deletions.
12 changes: 0 additions & 12 deletions suites/quincy/cephfs/tier-2_cephfs_test-volume-management.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -337,15 +337,3 @@ tests:
polarion-id: CEPH-11333
desc: File system life cycle
abort-on-fail: false
- test:
name: volume related scenarios(delete,rename)
module: cephfs_vol_management.cephfs_vol_mgmt_volume_scenarios.py
polarion-id: CEPH-83603354
desc: volume related scenarios(delete,rename)
abort-on-fail: false
- test:
name: cephfs subvolumegroup scenarios
module: cephfs_vol_management.cephfs_vol_mgmt_subvolgroup_scenarios.py
polarion-id: CEPH-83604079
desc: cephfs subvolumegroup scenarios
abort-on-fail: false
6 changes: 6 additions & 0 deletions suites/quincy/cephfs/tier-2_file-dir-lay_vol-mgmt_nfs.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -476,3 +476,9 @@ tests:
polarion-id: CEPH-83604097
desc: Test for validating all CephFS Volume Operations
abort-on-fail: false
- test:
name: volume rename, subvolume earmark, subvolumegroup idempotence scenarios
module: cephfs_vol_management.cephfs_vol_mgmt_rename_earmark_subvolume.py
polarion-id: CEPH-83604978
desc: volume rename, subvolume earmark, subvolumegroup idempotence scenarios
abort-on-fail: false
12 changes: 0 additions & 12 deletions suites/reef/cephfs/tier-2_cephfs_test-volume-management.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -334,15 +334,3 @@ tests:
polarion-id: CEPH-11333
desc: File system life cycle
abort-on-fail: false
- test:
name: volume related scenarios(delete,rename)
module: cephfs_vol_management.cephfs_vol_mgmt_volume_scenarios.py
polarion-id: CEPH-83603354
desc: volume related scenarios(delete,rename)
abort-on-fail: false
- test:
name: cephfs subvolumegroup scenarios
module: cephfs_vol_management.cephfs_vol_mgmt_subvolgroup_scenarios.py
polarion-id: CEPH-83604079
desc: cephfs subvolumegroup scenarios
abort-on-fail: false
6 changes: 6 additions & 0 deletions suites/reef/cephfs/tier-2_file-dir-lay_vol-mgmt_nfs.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -476,3 +476,9 @@ tests:
polarion-id: CEPH-83604097
desc: Test for validating all CephFS Volume Operations
abort-on-fail: false
- test:
name: volume rename, subvolume earmark, subvolumegroup idempotence scenarios
module: cephfs_vol_management.cephfs_vol_mgmt_rename_earmark_subvolume.py
polarion-id: CEPH-83604978
desc: volume rename, subvolume earmark, subvolumegroup idempotence scenarios
abort-on-fail: false
18 changes: 0 additions & 18 deletions suites/squid/cephfs/tier-2_cephfs_test-volume-management_arch.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -334,21 +334,3 @@ tests:
polarion-id: CEPH-11333
desc: File system life cycle
abort-on-fail: false
- test:
name: volume related scenarios(delete,rename)
module: cephfs_vol_management.cephfs_vol_mgmt_volume_scenarios.py
polarion-id: CEPH-83603354
desc: volume related scenarios(delete,rename)
abort-on-fail: false
- test:
name: cephfs subvolumegroup scenarios
module: cephfs_vol_management.cephfs_vol_mgmt_subvolgroup_scenarios.py
polarion-id: CEPH-83604079
desc: cephfs subvolumegroup scenarios
abort-on-fail: false
- test:
name: cephfs subvolume idempoence earmark
module: cephfs_vol_management.cephfs_vol_mgmt_subvol_idempotence_earmark.py
polarion-id: CEPH-83604184
desc: cephfs subvolume idempoence earmark
abort-on-fail: false
24 changes: 6 additions & 18 deletions suites/squid/cephfs/tier-2_file-dir-lay_vol-mgmt_nfs.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -470,27 +470,15 @@ tests:
polarion-id: CEPH-83604070
desc: Test fs volume creation, run IO & deletion in loop for 5 times
abort-on-fail: false
- test:
name: volume related scenarios(delete,rename)
module: cephfs_vol_management.cephfs_vol_mgmt_volume_scenarios.py
polarion-id: CEPH-83603354
desc: volume related scenarios(delete,rename)
abort-on-fail: false
- test:
name: cephfs subvolumegroup scenarios
module: cephfs_vol_management.cephfs_vol_mgmt_subvolgroup_scenarios.py
polarion-id: CEPH-83604079
desc: cephfs subvolumegroup scenarios
abort-on-fail: false
- test:
name: cephfs subvolume idempoence earmark
module: cephfs_vol_management.cephfs_vol_mgmt_subvol_idempotence_earmark.py
polarion-id: CEPH-83604184
desc: cephfs subvolume idempoence earmark
abort-on-fail: false
- test:
name: CephFS Volume Operations
module: cephfs_vol_management.cephfs_vol_mgmt_test_volume.py
polarion-id: CEPH-83604097
desc: Test for validating all CephFS Volume Operations
abort-on-fail: false
- test:
name: volume rename, subvolume earmark, subvolumegroup idempotence scenarios
module: cephfs_vol_management.cephfs_vol_mgmt_rename_earmark_subvolume.py
polarion-id: CEPH-83604978
desc: volume rename, subvolume earmark, subvolumegroup idempotence scenarios
abort-on-fail: false
37 changes: 37 additions & 0 deletions tests/cephfs/cephfs_utilsV1.py
Original file line number Diff line number Diff line change
Expand Up @@ -5702,3 +5702,40 @@ def validate_dicts(self, dicts, keys_to_check):
log.error(f"Key '{key}' mismatch: Values = {values}")
return False
return True

def rename_volume(self, client, old_name, new_name):
    """
    Rename a CephFS volume and restore it to a usable state.

    `ceph fs rename` refuses to act on a live filesystem, so the volume is
    first failed and `refuse_client_session` is enabled. After the rename,
    both flags are reverted and the method waits for the renamed filesystem
    to report an active MDS.

    Args:
        client: ceph client/installer node object used to run `ceph` commands.
        old_name (str): current volume name.
        new_name (str): desired volume name.

    Returns:
        int: 0 on success; 1 if the rename was rejected, the volume is not
        listed under the new name, or no active MDS appeared within the
        wait window (~50s).
    """
    log.info(f"[Fail {old_name} before renaming it]")
    client.exec_command(
        sudo=True, cmd=f"ceph fs fail {old_name} --yes-i-really-mean-it"
    )
    log.info("[Set refuse_client_session to true]")
    client.exec_command(
        sudo=True, cmd=f"ceph fs set {old_name} refuse_client_session true"
    )
    log.info("[Rename the volume]")
    rename_cmd = f"ceph fs rename {old_name} {new_name} --yes-i-really-mean-it"
    out, ec = client.exec_command(sudo=True, cmd=rename_cmd)
    # NOTE(review): the confirmation text appears to arrive on the second
    # element (stderr stream) — keep checking `ec`, as the original did.
    if "renamed." not in ec:
        log.error(ec)
        log.error(f"Failed to rename the volume: {out}")
        return 1
    out, ec = client.exec_command(sudo=True, cmd="ceph fs ls")
    if new_name not in out:
        log.error(f"Volume not renamed: {out}")
        return 1
    log.info(f"Volume renamed successfully: {out}")
    log.info("Put it back to previous state")
    client.exec_command(
        sudo=True, cmd=f"ceph fs set {new_name} refuse_client_session false"
    )
    client.exec_command(sudo=True, cmd=f"ceph fs set {new_name} joinable true")
    # Poll for an active MDS: 10 attempts x 5s sleep = ~50s max wait.
    for _ in range(10):
        out, ec = client.exec_command(sudo=True, cmd=f"ceph fs status {new_name}")
        if "active" in out:
            log.info(f"Volume {new_name} is active now")
            log.info("Renaming and verification of volume successful")
            return 0
        time.sleep(5)
    # Previously a timeout fell through, logged "active now" anyway and
    # returned 0; report it as a failure instead.
    log.error(f"Volume {new_name} did not report an active MDS after rename")
    return 1
Loading

0 comments on commit 8ad0c2d

Please sign in to comment.