addrvec: []
active_change: '0.000000'
active_gid: 0
active_name: ''
always_on_modules:
  nautilus:
  - balancer
  - crash
  - devicehealth
  - orchestrator_cli
  - progress
  - status
  - volumes
available: false
available_modules: []
epoch: 1
modules:
- iostat
- restful
services: {}
standbys: []
monmap:
  created: '2019-04-17 11:25:04.256284'
  epoch: 1
  features:
    optional: []
    persistent:
    - kraken
    - luminous
    - mimic
    - osdmap-prune
    - nautilus
  fsid: b6d61d41-5c6b-4c17-98be-f37217166318
  min_mon_release: 14
  min_mon_release_name: nautilus
  modified: '2019-04-17 11:25:04.256284'
  mons:
  - addr: 10.1.24.17:6789/0
    name: e24-h17-740xd
    public_addr: 10.1.24.17:6789/0
    public_addrs:
      addrvec:
      - addr: 10.1.24.17:3300
        nonce: 0
        type: v2
      - addr: 10.1.24.17:6789
        nonce: 0
        type: v1
    rank: 0
  - addr: 10.1.24.19:6789/0
    name: e24-h19-740xd
    public_addr: 10.1.24.19:6789/0
    public_addrs:
      addrvec:
      - addr: 10.1.24.19:3300
        nonce: 0
        type: v2
      - addr: 10.1.24.19:6789
        nonce: 0
        type: v1
    rank: 1
  - addr: 10.1.24.21:6789/0
    name: e24-h21-740xd
    public_addr: 10.1.24.21:6789/0
    public_addrs:
      addrvec:
      - addr: 10.1.24.21:3300
        nonce: 0
        type: v2
      - addr: 10.1.24.21:6789
        nonce: 0
        type: v1
    rank: 2
osdmap:
  osdmap:
    epoch: 1
    full: false
    nearfull: false
    num_in_osds: 0
    num_osds: 0
    num_remapped_pgs: 0
    num_up_osds: 0
pgmap:
  bytes_avail: 0
  bytes_total: 0
  bytes_used: 0
  data_bytes: 0
  num_objects: 0
  num_pgs: 0
  num_pools: 0
  pgs_by_state: []
progress_events: {}
quorum:
- 0
- 1
- 2
quorum_age: 15
quorum_names:
- e24-h17-740xd
- e24-h19-740xd
- e24-h21-740xd
servicemap:
  epoch: 1
  modified: '0.000000'
  services: {}

TASK [ceph-facts : set_fact fsid from ceph_current_status] **************************************************************************************************************
task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:91
Wednesday 17 April 2019 11:25:23 +0000 (0:00:00.112) 0:01:50.185 *******
ok: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false
  ansible_facts:
    fsid: b6d61d41-5c6b-4c17-98be-f37217166318
ok: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false
  ansible_facts:
    fsid: b6d61d41-5c6b-4c17-98be-f37217166318
ok: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false
  ansible_facts:
    fsid: b6d61d41-5c6b-4c17-98be-f37217166318

TASK [ceph-facts : generate cluster fsid] *******************************************************************************************************************************
task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:98
Wednesday 17 April 2019 11:25:23 +0000 (0:00:00.109) 0:01:50.294 *******
skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false
  skip_reason: Conditional result was False

TASK [ceph-facts : set_fact fsid] ***************************************************************************************************************************************
task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:105
Wednesday 17 April 2019 11:25:23 +0000 (0:00:00.038) 0:01:50.333 *******
skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false
  skip_reason: Conditional result was False
skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false
  skip_reason: Conditional result was False
skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false
  skip_reason: Conditional result was False

TASK [ceph-facts : set_fact mds_name ansible_hostname] ******************************************************************************************************************
task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:113
Wednesday 17 April 2019 11:25:24 +0000 (0:00:00.090) 0:01:50.424 *******
ok: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false
  ansible_facts:
    mds_name: e24-h17-740xd
ok: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false
  ansible_facts:
    mds_name: e24-h19-740xd
ok: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false
  ansible_facts:
    mds_name: e24-h21-740xd

TASK [ceph-facts : set_fact mds_name ansible_fqdn] **********************************************************************************************************************
task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:119
Wednesday 17 April 2019 11:25:24 +0000 (0:00:00.104) 0:01:50.529 *******
skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false
  skip_reason: Conditional result was False
skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false
  skip_reason: Conditional result was False
skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false
  skip_reason: Conditional result was False

TASK [ceph-facts : set_fact rbd_client_directory_owner ceph] ************************************************************************************************************
task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:125
Wednesday 17 April 2019 11:25:24 +0000 (0:00:00.086) 0:01:50.615 *******
skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false
  skip_reason: Conditional result was False
skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false
  skip_reason: Conditional result was False
skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false
  skip_reason: Conditional result was False

TASK [ceph-facts : set_fact rbd_client_directory_group rbd_client_directory_group] **************************************************************************************
task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:132
Wednesday 17 April 2019 11:25:24 +0000 (0:00:00.087) 0:01:50.702 *******
skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false
  skip_reason: Conditional result was False
skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false
  skip_reason: Conditional result was False
skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false
  skip_reason: Conditional result was False

TASK [ceph-facts : set_fact rbd_client_directory_mode 0770] *************************************************************************************************************
task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:139
Wednesday 17 April 2019 11:25:24 +0000 (0:00:00.087) 0:01:50.790 *******
skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false
  skip_reason: Conditional result was False
skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false
  skip_reason: Conditional result was False
skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false
  skip_reason: Conditional result was False

TASK [ceph-facts : resolve device link(s)] ******************************************************************************************************************************
task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:146
Wednesday 17 April 2019 11:25:24 +0000 (0:00:00.091) 0:01:50.881 *******
skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false
  skip_reason: Conditional result was False
skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false
  skip_reason: Conditional result was False
skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false
  skip_reason: Conditional result was False

TASK [ceph-facts : set_fact build devices from resolved symlinks] *******************************************************************************************************
task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:157
Wednesday 17 April 2019 11:25:24 +0000 (0:00:00.089) 0:01:50.971 *******
skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false
  skip_reason: Conditional result was False
skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false
  skip_reason: Conditional result was False
skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false
  skip_reason: Conditional result was False

TASK [ceph-facts : set_fact build final devices list] *******************************************************************************************************************
task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:167
Wednesday 17 April 2019 11:25:24 +0000 (0:00:00.090) 0:01:51.061 *******
skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false
  skip_reason: Conditional result was False
skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false
  skip_reason: Conditional result was False
skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false
  skip_reason: Conditional result was False

TASK [ceph-facts : set_fact devices generate device list when osd_auto_discovery] ***************************************************************************************
task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:176
Wednesday 17 April 2019 11:25:24 +0000 (0:00:00.088) 0:01:51.149 *******
skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'dm-1', 'value': {'virtual': 1, 'links': {'ids': ['dm-name-rhel_e24--h17--740xd-swap', 'dm-uuid-LVM-1fMVcHXeDecRL26yNs2TGdh9nNZvIS7SxtIDMVGeZ1N91PI5xcKhJvTco1rYfVb4'], 'uuids': ['16def3c0-0b41-49fb-8328-1e8529a9bb66'], 'labels': [], 'masters': []}, 'vendor': None, 'model': None, 'sas_address': None, 'sas_device_handle': None, 'serial': '00450847a329be00230055976e604609', 'removable': '0', 'support_discard': '0', 'partitions': {}, 'rotational': '1', 'scheduler_mode': '', 'sectors': '8388608', 'sectorsize': '512', 'size': '4.00 GB', 'host': '', 'holders': []}}) => changed=false item: key: dm-1 value: holders: [] host: '' links: ids: - dm-name-rhel_e24--h17--740xd-swap -
dm-uuid-LVM-1fMVcHXeDecRL26yNs2TGdh9nNZvIS7SxtIDMVGeZ1N91PI5xcKhJvTco1rYfVb4 labels: [] masters: [] uuids: - 16def3c0-0b41-49fb-8328-1e8529a9bb66 model: null partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: '' sectors: '8388608' sectorsize: '512' serial: 00450847a329be00230055976e604609 size: 4.00 GB support_discard: '0' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'sdf', 'value': {'virtual': 1, 'links': {'ids': ['scsi-36d0946606e9755002300be3fc4e447c9', 'wwn-0x6d0946606e9755002300be3fc4e447c9'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': 'DELL', 'model': 'PERC H740P Adp', 'sas_address': None, 'sas_device_handle': None, 'serial': '00c947e4c43fbe00230055976e604609', 'removable': '0', 'support_discard': '0', 'wwn': '0x6d0946606e9755002300be3fc4e447c9', 'partitions': {}, 'rotational': '1', 'scheduler_mode': 'mq-deadline', 'sectors': '3515088896', 'sectorsize': '512', 'size': '1.64 TB', 'host': 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)', 'holders': []}}) => changed=false item: key: sdf value: holders: [] host: 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)' links: ids: - scsi-36d0946606e9755002300be3fc4e447c9 - wwn-0x6d0946606e9755002300be3fc4e447c9 labels: [] masters: [] uuids: [] model: PERC H740P Adp partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '3515088896' sectorsize: '512' serial: 00c947e4c43fbe00230055976e604609 size: 1.64 TB support_discard: '0' vendor: DELL virtual: 1 wwn: '0x6d0946606e9755002300be3fc4e447c9' skip_reason: Conditional result was False skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'nvme0n1', 'value': {'virtual': 1, 'links': {'ids': ['nvme-Dell_Express_Flash_PM1725a_3.2TB_AIC__S3B1NA0JA01726', 'nvme-eui.334231304aa017260025384100000002'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': None, 'model': 'Dell Express Flash PM1725a 3.2TB AIC', 'sas_address': None, 'sas_device_handle': None, 'removable': '0', 'support_discard': '512', 'partitions': {}, 'rotational': '0', 'scheduler_mode': 'none', 'sectors': '6251233968', 'sectorsize': '512', 'size': '2.91 TB', 'host': 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)', 'holders': []}}) => changed=false item: key: nvme0n1 value: holders: [] host: 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)' links: ids: - nvme-Dell_Express_Flash_PM1725a_3.2TB_AIC__S3B1NA0JA01726 - nvme-eui.334231304aa017260025384100000002 labels: [] masters: [] uuids: [] model: Dell Express Flash PM1725a 3.2TB AIC partitions: {} removable: '0' rotational: '0' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '6251233968' sectorsize: '512' size: 2.91 TB support_discard: '512' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'sdd', 'value': {'virtual': 1, 'links': {'ids': ['scsi-36d0946606e9755002300be36b72edfbe', 'wwn-0x6d0946606e9755002300be36b72edfbe'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': 'DELL', 'model': 'PERC H740P Adp', 'sas_address': None, 'sas_device_handle': None, 'serial': '00bedf2eb736be00230055976e604609', 'removable': '0', 'support_discard': '0', 'wwn': 
'0x6d0946606e9755002300be36b72edfbe', 'partitions': {}, 'rotational': '1', 'scheduler_mode': 'mq-deadline', 'sectors': '3515088896', 'sectorsize': '512', 'size': '1.64 TB', 'host': 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)', 'holders': []}}) => changed=false item: key: sdd value: holders: [] host: 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)' links: ids: - scsi-36d0946606e9755002300be36b72edfbe - wwn-0x6d0946606e9755002300be36b72edfbe labels: [] masters: [] uuids: [] model: PERC H740P Adp partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '3515088896' sectorsize: '512' serial: 00bedf2eb736be00230055976e604609 size: 1.64 TB support_discard: '0' vendor: DELL virtual: 1 wwn: '0x6d0946606e9755002300be36b72edfbe' skip_reason: Conditional result was False skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'sdb', 'value': {'virtual': 1, 'links': {'ids': ['scsi-36d0946606e9755002300be2da9f906ee', 'wwn-0x6d0946606e9755002300be2da9f906ee'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': 'DELL', 'model': 'PERC H740P Adp', 'sas_address': None, 'sas_device_handle': None, 'serial': '00ee06f9a92dbe00230055976e604609', 'removable': '0', 'support_discard': '0', 'wwn': '0x6d0946606e9755002300be2da9f906ee', 'partitions': {}, 'rotational': '1', 'scheduler_mode': 'mq-deadline', 'sectors': '3515088896', 'sectorsize': '512', 'size': '1.64 TB', 'host': 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)', 'holders': []}}) => changed=false item: key: sdb value: holders: [] host: 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)' links: ids: - scsi-36d0946606e9755002300be2da9f906ee - wwn-0x6d0946606e9755002300be2da9f906ee labels: [] masters: [] uuids: [] model: PERC H740P Adp partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '3515088896' sectorsize: '512' serial: 00ee06f9a92dbe00230055976e604609 size: 1.64 TB support_discard: '0' vendor: DELL virtual: 1 wwn: '0x6d0946606e9755002300be2da9f906ee' skip_reason: Conditional result was False skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'dm-2', 'value': {'virtual': 1, 'links': {'ids': ['dm-name-rhel_e24--h17--740xd-home', 'dm-uuid-LVM-1fMVcHXeDecRL26yNs2TGdh9nNZvIS7SkUiMiv3Cg5Vy5i32fzcls6rW1lxpU43w'], 'uuids': ['5bcb4a4d-4cc0-4630-a4e8-bd27e5c12330'], 'labels': [], 'masters': []}, 'vendor': None, 'model': None, 'sas_address': None, 'sas_device_handle': None, 'serial': '00450847a329be00230055976e604609', 'removable': '0', 'support_discard': '0', 'partitions': {}, 'rotational': '1', 'scheduler_mode': '', 'sectors': '3399737344', 'sectorsize': '512', 'size': '1.58 TB', 'host': '', 'holders': []}}) => changed=false item: key: dm-2 value: holders: [] host: '' links: ids: - dm-name-rhel_e24--h17--740xd-home - dm-uuid-LVM-1fMVcHXeDecRL26yNs2TGdh9nNZvIS7SkUiMiv3Cg5Vy5i32fzcls6rW1lxpU43w labels: [] masters: [] uuids: - 5bcb4a4d-4cc0-4630-a4e8-bd27e5c12330 model: null partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: '' sectors: '3399737344' sectorsize: '512' serial: 00450847a329be00230055976e604609 size: 1.58 TB support_discard: '0' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'dm-1', 'value': 
{'virtual': 1, 'links': {'ids': ['dm-name-rhel_e24--h19--740xd-swap', 'dm-uuid-LVM-wSEqfWPJ3Yncf4hVx3dwNPS8Y50FWzwpaQRh112RZX3yaj0OnNLFKW6n4aY4mOt6'], 'uuids': ['bc40aae9-4b50-4c6a-8809-ba8de0bcf24e'], 'labels': [], 'masters': []}, 'vendor': None, 'model': None, 'sas_address': None, 'sas_device_handle': None, 'serial': '00be189aa111960023009c986e604609', 'removable': '0', 'support_discard': '0', 'partitions': {}, 'rotational': '1', 'scheduler_mode': '', 'sectors': '8388608', 'sectorsize': '512', 'size': '4.00 GB', 'host': '', 'holders': []}}) => changed=false item: key: dm-1 value: holders: [] host: '' links: ids: - dm-name-rhel_e24--h19--740xd-swap - dm-uuid-LVM-wSEqfWPJ3Yncf4hVx3dwNPS8Y50FWzwpaQRh112RZX3yaj0OnNLFKW6n4aY4mOt6 labels: [] masters: [] uuids: - bc40aae9-4b50-4c6a-8809-ba8de0bcf24e model: null partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: '' sectors: '8388608' sectorsize: '512' serial: 00be189aa111960023009c986e604609 size: 4.00 GB support_discard: '0' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'sdg', 'value': {'virtual': 1, 'links': {'ids': ['scsi-36d0946606e9755002300be44cbf6b09a', 'wwn-0x6d0946606e9755002300be44cbf6b09a'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': 'DELL', 'model': 'PERC H740P Adp', 'sas_address': None, 'sas_device_handle': None, 'serial': '009ab0f6cb44be00230055976e604609', 'removable': '0', 'support_discard': '0', 'wwn': '0x6d0946606e9755002300be44cbf6b09a', 'partitions': {}, 'rotational': '1', 'scheduler_mode': 'mq-deadline', 'sectors': '3515088896', 'sectorsize': '512', 'size': '1.64 TB', 'host': 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)', 'holders': []}}) => changed=false item: key: sdg value: holders: [] host: 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)' links: ids: - scsi-36d0946606e9755002300be44cbf6b09a - wwn-0x6d0946606e9755002300be44cbf6b09a labels: [] masters: [] uuids: [] model: PERC H740P Adp partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '3515088896' sectorsize: '512' serial: 009ab0f6cb44be00230055976e604609 size: 1.64 TB support_discard: '0' vendor: DELL virtual: 1 wwn: '0x6d0946606e9755002300be44cbf6b09a' skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'sdf', 'value': {'virtual': 1, 'links': {'ids': ['scsi-36d0946606e989c0023009627c36e0671', 'wwn-0x6d0946606e989c0023009627c36e0671'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': 'DELL', 'model': 'PERC H740P Adp', 'sas_address': None, 'sas_device_handle': None, 'serial': '0071066ec327960023009c986e604609', 'removable': '0', 'support_discard': '0', 'wwn': '0x6d0946606e989c0023009627c36e0671', 'partitions': {}, 'rotational': '1', 'scheduler_mode': 'mq-deadline', 'sectors': '3515088896', 'sectorsize': '512', 'size': '1.64 TB', 'host': 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)', 'holders': []}}) => changed=false item: key: sdf value: holders: [] host: 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)' links: ids: - scsi-36d0946606e989c0023009627c36e0671 - wwn-0x6d0946606e989c0023009627c36e0671 labels: [] masters: [] uuids: [] model: PERC H740P Adp partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null 
scheduler_mode: mq-deadline sectors: '3515088896' sectorsize: '512' serial: 0071066ec327960023009c986e604609 size: 1.64 TB support_discard: '0' vendor: DELL virtual: 1 wwn: '0x6d0946606e989c0023009627c36e0671' skip_reason: Conditional result was False skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'dm-0', 'value': {'virtual': 1, 'links': {'ids': ['dm-name-rhel_e24--h17--740xd-root', 'dm-uuid-LVM-1fMVcHXeDecRL26yNs2TGdh9nNZvIS7S6EZJaFb0enOG6zwMNayHlhqdKVph2KFe'], 'uuids': ['9f5ce886-3529-4732-b618-f246c060cbf1'], 'labels': [], 'masters': []}, 'vendor': None, 'model': None, 'sas_address': None, 'sas_device_handle': None, 'serial': '00450847a329be00230055976e604609', 'removable': '0', 'support_discard': '0', 'partitions': {}, 'rotational': '1', 'scheduler_mode': '', 'sectors': '104857600', 'sectorsize': '512', 'size': '50.00 GB', 'host': '', 'holders': []}}) => changed=false item: key: dm-0 value: holders: [] host: '' links: ids: - dm-name-rhel_e24--h17--740xd-root - dm-uuid-LVM-1fMVcHXeDecRL26yNs2TGdh9nNZvIS7S6EZJaFb0enOG6zwMNayHlhqdKVph2KFe labels: [] masters: [] uuids: - 9f5ce886-3529-4732-b618-f246c060cbf1 model: null partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: '' sectors: '104857600' sectorsize: '512' serial: 00450847a329be00230055976e604609 size: 50.00 GB support_discard: '0' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'nvme0n1', 'value': {'virtual': 1, 'links': {'ids': ['nvme-Dell_Express_Flash_PM1725a_3.2TB_AIC__S3B1NA0JA01732', 'nvme-eui.334231304aa017320025384100000002'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': None, 'model': 'Dell Express Flash PM1725a 3.2TB AIC', 'sas_address': None, 'sas_device_handle': None, 'removable': '0', 'support_discard': '512', 'partitions': {}, 'rotational': '0', 'scheduler_mode': 'none', 'sectors': '6251233968', 'sectorsize': '512', 'size': '2.91 TB', 'host': 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)', 'holders': []}}) => changed=false item: key: nvme0n1 value: holders: [] host: 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)' links: ids: - nvme-Dell_Express_Flash_PM1725a_3.2TB_AIC__S3B1NA0JA01732 - nvme-eui.334231304aa017320025384100000002 labels: [] masters: [] uuids: [] model: Dell Express Flash PM1725a 3.2TB AIC partitions: {} removable: '0' rotational: '0' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '6251233968' sectorsize: '512' size: 2.91 TB support_discard: '512' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'sde', 'value': {'virtual': 1, 'links': {'ids': ['scsi-36d0946606e9755002300be3bbdfbfbf7', 'wwn-0x6d0946606e9755002300be3bbdfbfbf7'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': 'DELL', 'model': 'PERC H740P Adp', 'sas_address': None, 'sas_device_handle': None, 'serial': '00f7fbfbbd3bbe00230055976e604609', 'removable': '0', 'support_discard': '0', 'wwn': '0x6d0946606e9755002300be3bbdfbfbf7', 'partitions': {}, 'rotational': '1', 'scheduler_mode': 'mq-deadline', 'sectors': '3515088896', 'sectorsize': '512', 'size': '1.64 TB', 'host': 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)', 'holders': []}}) => changed=false item: key: sde value: holders: [] host: 'RAID bus controller: LSI 
Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)' links: ids: - scsi-36d0946606e9755002300be3bbdfbfbf7 - wwn-0x6d0946606e9755002300be3bbdfbfbf7 labels: [] masters: [] uuids: [] model: PERC H740P Adp partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '3515088896' sectorsize: '512' serial: 00f7fbfbbd3bbe00230055976e604609 size: 1.64 TB support_discard: '0' vendor: DELL virtual: 1 wwn: '0x6d0946606e9755002300be3bbdfbfbf7' skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'sdd', 'value': {'virtual': 1, 'links': {'ids': ['scsi-36d0946606e989c002300961eb58d3a51', 'wwn-0x6d0946606e989c002300961eb58d3a51'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': 'DELL', 'model': 'PERC H740P Adp', 'sas_address': None, 'sas_device_handle': None, 'serial': '00513a8db51e960023009c986e604609', 'removable': '0', 'support_discard': '0', 'wwn': '0x6d0946606e989c002300961eb58d3a51', 'partitions': {}, 'rotational': '1', 'scheduler_mode': 'mq-deadline', 'sectors': '3515088896', 'sectorsize': '512', 'size': '1.64 TB', 'host': 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)', 'holders': []}}) => changed=false item: key: sdd value: holders: [] host: 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)' links: ids: - scsi-36d0946606e989c002300961eb58d3a51 - wwn-0x6d0946606e989c002300961eb58d3a51 labels: [] masters: [] uuids: [] model: PERC H740P Adp partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '3515088896' sectorsize: '512' serial: 00513a8db51e960023009c986e604609 size: 1.64 TB support_discard: '0' vendor: DELL virtual: 1 wwn: '0x6d0946606e989c002300961eb58d3a51' skip_reason: Conditional result was False skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'sdc', 'value': {'virtual': 1, 'links': {'ids': ['scsi-36d0946606e9755002300be31b07c0d44', 'wwn-0x6d0946606e9755002300be31b07c0d44'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': 'DELL', 'model': 'PERC H740P Adp', 'sas_address': None, 'sas_device_handle': None, 'serial': '00440d7cb031be00230055976e604609', 'removable': '0', 'support_discard': '0', 'wwn': '0x6d0946606e9755002300be31b07c0d44', 'partitions': {}, 'rotational': '1', 'scheduler_mode': 'mq-deadline', 'sectors': '3515088896', 'sectorsize': '512', 'size': '1.64 TB', 'host': 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)', 'holders': []}}) => changed=false item: key: sdc value: holders: [] host: 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)' links: ids: - scsi-36d0946606e9755002300be31b07c0d44 - wwn-0x6d0946606e9755002300be31b07c0d44 labels: [] masters: [] uuids: [] model: PERC H740P Adp partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '3515088896' sectorsize: '512' serial: 00440d7cb031be00230055976e604609 size: 1.64 TB support_discard: '0' vendor: DELL virtual: 1 wwn: '0x6d0946606e9755002300be31b07c0d44' skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'sdb', 'value': {'virtual': 1, 'links': {'ids': ['scsi-36d0946606e989c0023009615a852112f', 'wwn-0x6d0946606e989c0023009615a852112f'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': 'DELL', 'model': 'PERC H740P Adp', 
'sas_address': None, 'sas_device_handle': None, 'serial': '002f1152a815960023009c986e604609', 'removable': '0', 'support_discard': '0', 'wwn': '0x6d0946606e989c0023009615a852112f', 'partitions': {}, 'rotational': '1', 'scheduler_mode': 'mq-deadline', 'sectors': '3515088896', 'sectorsize': '512', 'size': '1.64 TB', 'host': 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)', 'holders': []}}) => changed=false item: key: sdb value: holders: [] host: 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)' links: ids: - scsi-36d0946606e989c0023009615a852112f - wwn-0x6d0946606e989c0023009615a852112f labels: [] masters: [] uuids: [] model: PERC H740P Adp partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '3515088896' sectorsize: '512' serial: 002f1152a815960023009c986e604609 size: 1.64 TB support_discard: '0' vendor: DELL virtual: 1 wwn: '0x6d0946606e989c0023009615a852112f' skip_reason: Conditional result was False skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'sda', 'value': {'virtual': 1, 'links': {'ids': ['scsi-36d0946606e9755002300be29a3470845', 'wwn-0x6d0946606e9755002300be29a3470845'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': 'DELL', 'model': 'PERC H740P Adp', 'sas_address': None, 'sas_device_handle': None, 'serial': '00450847a329be00230055976e604609', 'removable': '0', 'support_discard': '0', 'wwn': '0x6d0946606e9755002300be29a3470845', 'partitions': {'sda2': {'links': {'ids': ['lvm-pv-uuid-USqB3R-nfhf-FgDV-6ipw-SbKK-i4xU-VhkpOp', 'scsi-36d0946606e9755002300be29a3470845-part2', 'wwn-0x6d0946606e9755002300be29a3470845-part2'], 'uuids': [], 'labels': [], 'masters': ['dm-0', 'dm-1', 'dm-2']}, 'start': '2099200', 'sectors': '3512989696', 'sectorsize': 512, 'size': '1.64 TB', 'uuid': None, 'holders': ['rhel_e24--h17--740xd-swap', 'rhel_e24--h17--740xd-home', 'rhel_e24--h17--740xd-root']}, 'sda1': {'links': {'ids': ['scsi-36d0946606e9755002300be29a3470845-part1', 'wwn-0x6d0946606e9755002300be29a3470845-part1'], 'uuids': ['8cb8f3e6-5255-4028-8667-ecfe88cd4c83'], 'labels': [], 'masters': []}, 'start': '2048', 'sectors': '2097152', 'sectorsize': 512, 'size': '1.00 GB', 'uuid': '8cb8f3e6-5255-4028-8667-ecfe88cd4c83', 'holders': []}}, 'rotational': '1', 'scheduler_mode': 'mq-deadline', 'sectors': '3515088896', 'sectorsize': '512', 'size': '1.64 TB', 'host': 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)', 'holders': []}}) => changed=false item: key: sda value: holders: [] host: 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)' links: ids: - scsi-36d0946606e9755002300be29a3470845 - wwn-0x6d0946606e9755002300be29a3470845 labels: [] masters: [] uuids: [] model: PERC H740P Adp partitions: sda1: holders: [] links: ids: - scsi-36d0946606e9755002300be29a3470845-part1 - wwn-0x6d0946606e9755002300be29a3470845-part1 labels: [] masters: [] uuids: - 8cb8f3e6-5255-4028-8667-ecfe88cd4c83 sectors: '2097152' sectorsize: 512 size: 1.00 GB start: '2048' uuid: 8cb8f3e6-5255-4028-8667-ecfe88cd4c83 sda2: holders: - rhel_e24--h17--740xd-swap - rhel_e24--h17--740xd-home - rhel_e24--h17--740xd-root links: ids: - lvm-pv-uuid-USqB3R-nfhf-FgDV-6ipw-SbKK-i4xU-VhkpOp - scsi-36d0946606e9755002300be29a3470845-part2 - wwn-0x6d0946606e9755002300be29a3470845-part2 labels: [] masters: - dm-0 - dm-1 - dm-2 uuids: [] sectors: '3512989696' sectorsize: 512 size: 1.64 TB start: '2099200' uuid: null 
removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '3515088896' sectorsize: '512' serial: 00450847a329be00230055976e604609 size: 1.64 TB support_discard: '0' vendor: DELL virtual: 1 wwn: '0x6d0946606e9755002300be29a3470845' skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'dm-2', 'value': {'virtual': 1, 'links': {'ids': ['dm-name-rhel_e24--h19--740xd-home', 'dm-uuid-LVM-wSEqfWPJ3Yncf4hVx3dwNPS8Y50FWzwp4FWZSHAFPZY1fiHvGRrsAfWtWJDM2TDG'], 'uuids': ['07fbbe36-95a6-4487-b1e3-628d385beab9'], 'labels': [], 'masters': []}, 'vendor': None, 'model': None, 'sas_address': None, 'sas_device_handle': None, 'serial': '00be189aa111960023009c986e604609', 'removable': '0', 'support_discard': '0', 'partitions': {}, 'rotational': '1', 'scheduler_mode': '', 'sectors': '3399737344', 'sectorsize': '512', 'size': '1.58 TB', 'host': '', 'holders': []}}) => changed=false item: key: dm-2 value: holders: [] host: '' links: ids: - dm-name-rhel_e24--h19--740xd-home - dm-uuid-LVM-wSEqfWPJ3Yncf4hVx3dwNPS8Y50FWzwp4FWZSHAFPZY1fiHvGRrsAfWtWJDM2TDG labels: [] masters: [] uuids: - 07fbbe36-95a6-4487-b1e3-628d385beab9 model: null partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: '' sectors: '3399737344' sectorsize: '512' serial: 00be189aa111960023009c986e604609 size: 1.58 TB support_discard: '0' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'sdh', 'value': {'virtual': 1, 'links': {'ids': ['scsi-36d0946606e9755002300be49d31a9573', 'wwn-0x6d0946606e9755002300be49d31a9573'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': 'DELL', 'model': 'PERC H740P Adp', 'sas_address': None, 'sas_device_handle': None, 'serial': '0073951ad349be00230055976e604609', 'removable': '0', 'support_discard': '0', 'wwn': '0x6d0946606e9755002300be49d31a9573', 'partitions': {}, 'rotational': '1', 'scheduler_mode': 'mq-deadline', 'sectors': '3515088896', 'sectorsize': '512', 'size': '1.64 TB', 'host': 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)', 'holders': []}}) => changed=false item: key: sdh value: holders: [] host: 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)' links: ids: - scsi-36d0946606e9755002300be49d31a9573 - wwn-0x6d0946606e9755002300be49d31a9573 labels: [] masters: [] uuids: [] model: PERC H740P Adp partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '3515088896' sectorsize: '512' serial: 0073951ad349be00230055976e604609 size: 1.64 TB support_discard: '0' vendor: DELL virtual: 1 wwn: '0x6d0946606e9755002300be49d31a9573' skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'dm-1', 'value': {'virtual': 1, 'links': {'ids': ['dm-name-rhel_e24--h21--740xd-swap', 'dm-uuid-LVM-Kdh47jCVeeN7lqw8LX3NwWRQ0WWUlKBsC5vz8MBGOHOnRElCkXmrugWgwtXI48bL'], 'uuids': ['3a3eb2e0-c66e-44a1-a272-7d4be69b8181'], 'labels': [], 'masters': []}, 'vendor': None, 'model': None, 'sas_address': None, 'sas_device_handle': None, 'serial': '000993b7a5adbd0023006b976e604609', 'removable': '0', 'support_discard': '0', 'partitions': {}, 'rotational': '1', 'scheduler_mode': '', 'sectors': '8388608', 'sectorsize': '512', 'size': '4.00 GB', 'host': '', 'holders': []}}) => changed=false item: key: dm-1 value: 
holders: [] host: '' links: ids: - dm-name-rhel_e24--h21--740xd-swap - dm-uuid-LVM-Kdh47jCVeeN7lqw8LX3NwWRQ0WWUlKBsC5vz8MBGOHOnRElCkXmrugWgwtXI48bL labels: [] masters: [] uuids: - 3a3eb2e0-c66e-44a1-a272-7d4be69b8181 model: null partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: '' sectors: '8388608' sectorsize: '512' serial: 000993b7a5adbd0023006b976e604609 size: 4.00 GB support_discard: '0' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'sdg', 'value': {'virtual': 1, 'links': {'ids': ['scsi-36d0946606e989c002300962cca8575a5', 'wwn-0x6d0946606e989c002300962cca8575a5'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': 'DELL', 'model': 'PERC H740P Adp', 'sas_address': None, 'sas_device_handle': None, 'serial': '00a57585ca2c960023009c986e604609', 'removable': '0', 'support_discard': '0', 'wwn': '0x6d0946606e989c002300962cca8575a5', 'partitions': {}, 'rotational': '1', 'scheduler_mode': 'mq-deadline', 'sectors': '3515088896', 'sectorsize': '512', 'size': '1.64 TB', 'host': 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)', 'holders': []}}) => changed=false item: key: sdg value: holders: [] host: 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)' links: ids: - scsi-36d0946606e989c002300962cca8575a5 - wwn-0x6d0946606e989c002300962cca8575a5 labels: [] masters: [] uuids: [] model: PERC H740P Adp partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '3515088896' sectorsize: '512' serial: 00a57585ca2c960023009c986e604609 size: 1.64 TB support_discard: '0' vendor: DELL virtual: 1 wwn: '0x6d0946606e989c002300962cca8575a5' skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'sdf', 'value': {'virtual': 1, 'links': {'ids': ['scsi-36d0946606e976b002300bdc4c75c2f38', 'wwn-0x6d0946606e976b002300bdc4c75c2f38'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': 'DELL', 'model': 'PERC H740P Adp', 'sas_address': None, 'sas_device_handle': None, 'serial': '00382f5cc7c4bd0023006b976e604609', 'removable': '0', 'support_discard': '0', 'wwn': '0x6d0946606e976b002300bdc4c75c2f38', 'partitions': {}, 'rotational': '1', 'scheduler_mode': 'mq-deadline', 'sectors': '3515088896', 'sectorsize': '512', 'size': '1.64 TB', 'host': 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)', 'holders': []}}) => changed=false item: key: sdf value: holders: [] host: 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)' links: ids: - scsi-36d0946606e976b002300bdc4c75c2f38 - wwn-0x6d0946606e976b002300bdc4c75c2f38 labels: [] masters: [] uuids: [] model: PERC H740P Adp partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '3515088896' sectorsize: '512' serial: 00382f5cc7c4bd0023006b976e604609 size: 1.64 TB support_discard: '0' vendor: DELL virtual: 1 wwn: '0x6d0946606e976b002300bdc4c75c2f38' skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'dm-0', 'value': {'virtual': 1, 'links': {'ids': ['dm-name-rhel_e24--h19--740xd-root', 'dm-uuid-LVM-wSEqfWPJ3Yncf4hVx3dwNPS8Y50FWzwpNiWgtVM8I0NFkw72d2rTc6aC3cJpbV6G'], 'uuids': ['b838865e-e30d-4b5f-a4cd-ab20928e9939'], 'labels': [], 'masters': []}, 'vendor': None, 
'model': None, 'sas_address': None, 'sas_device_handle': None, 'serial': '00be189aa111960023009c986e604609', 'removable': '0', 'support_discard': '0', 'partitions': {}, 'rotational': '1', 'scheduler_mode': '', 'sectors': '104857600', 'sectorsize': '512', 'size': '50.00 GB', 'host': '', 'holders': []}}) => changed=false item: key: dm-0 value: holders: [] host: '' links: ids: - dm-name-rhel_e24--h19--740xd-root - dm-uuid-LVM-wSEqfWPJ3Yncf4hVx3dwNPS8Y50FWzwpNiWgtVM8I0NFkw72d2rTc6aC3cJpbV6G labels: [] masters: [] uuids: - b838865e-e30d-4b5f-a4cd-ab20928e9939 model: null partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: '' sectors: '104857600' sectorsize: '512' serial: 00be189aa111960023009c986e604609 size: 50.00 GB support_discard: '0' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'nvme0n1', 'value': {'virtual': 1, 'links': {'ids': ['nvme-Dell_Express_Flash_PM1725a_3.2TB_AIC__S3B1NA0JA01778', 'nvme-eui.334231304aa017780025384100000002'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': None, 'model': 'Dell Express Flash PM1725a 3.2TB AIC', 'sas_address': None, 'sas_device_handle': None, 'removable': '0', 'support_discard': '512', 'partitions': {}, 'rotational': '0', 'scheduler_mode': 'none', 'sectors': '6251233968', 'sectorsize': '512', 'size': '2.91 TB', 'host': 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)', 'holders': []}}) => changed=false item: key: nvme0n1 value: holders: [] host: 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)' links: ids: - nvme-Dell_Express_Flash_PM1725a_3.2TB_AIC__S3B1NA0JA01778 - nvme-eui.334231304aa017780025384100000002 labels: [] masters: [] uuids: [] model: Dell Express Flash PM1725a 3.2TB AIC partitions: {} removable: '0' rotational: '0' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '6251233968' sectorsize: '512' size: 2.91 TB support_discard: '512' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'sde', 'value': {'virtual': 1, 'links': {'ids': ['scsi-36d0946606e989c0023009623bc6e9afe', 'wwn-0x6d0946606e989c0023009623bc6e9afe'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': 'DELL', 'model': 'PERC H740P Adp', 'sas_address': None, 'sas_device_handle': None, 'serial': '00fe9a6ebc23960023009c986e604609', 'removable': '0', 'support_discard': '0', 'wwn': '0x6d0946606e989c0023009623bc6e9afe', 'partitions': {}, 'rotational': '1', 'scheduler_mode': 'mq-deadline', 'sectors': '3515088896', 'sectorsize': '512', 'size': '1.64 TB', 'host': 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)', 'holders': []}}) => changed=false item: key: sde value: holders: [] host: 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)' links: ids: - scsi-36d0946606e989c0023009623bc6e9afe - wwn-0x6d0946606e989c0023009623bc6e9afe labels: [] masters: [] uuids: [] model: PERC H740P Adp partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '3515088896' sectorsize: '512' serial: 00fe9a6ebc23960023009c986e604609 size: 1.64 TB support_discard: '0' vendor: DELL virtual: 1 wwn: '0x6d0946606e989c0023009623bc6e9afe' skip_reason: Conditional result was False skipping: 
[e24-h21-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'sdd', 'value': {'virtual': 1, 'links': {'ids': ['scsi-36d0946606e976b002300bdbab99e5156', 'wwn-0x6d0946606e976b002300bdbab99e5156'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': 'DELL', 'model': 'PERC H740P Adp', 'sas_address': None, 'sas_device_handle': None, 'serial': '0056519eb9babd0023006b976e604609', 'removable': '0', 'support_discard': '0', 'wwn': '0x6d0946606e976b002300bdbab99e5156', 'partitions': {}, 'rotational': '1', 'scheduler_mode': 'mq-deadline', 'sectors': '3515088896', 'sectorsize': '512', 'size': '1.64 TB', 'host': 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)', 'holders': []}}) => changed=false item: key: sdd value: holders: [] host: 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)' links: ids: - scsi-36d0946606e976b002300bdbab99e5156 - wwn-0x6d0946606e976b002300bdbab99e5156 labels: [] masters: [] uuids: [] model: PERC H740P Adp partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '3515088896' sectorsize: '512' serial: 0056519eb9babd0023006b976e604609 size: 1.64 TB support_discard: '0' vendor: DELL virtual: 1 wwn: '0x6d0946606e976b002300bdbab99e5156' skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'sdc', 'value': {'virtual': 1, 'links': {'ids': ['scsi-36d0946606e989c0023009619aee942ee', 'wwn-0x6d0946606e989c0023009619aee942ee'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': 'DELL', 'model': 'PERC H740P Adp', 'sas_address': None, 'sas_device_handle': None, 'serial': '00ee42e9ae19960023009c986e604609', 'removable': '0', 'support_discard': '0', 'wwn': '0x6d0946606e989c0023009619aee942ee', 'partitions': {}, 'rotational': '1', 'scheduler_mode': 'mq-deadline', 'sectors': '3515088896', 'sectorsize': '512', 'size': '1.64 TB', 'host': 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)', 'holders': []}}) => changed=false item: key: sdc value: holders: [] host: 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)' links: ids: - scsi-36d0946606e989c0023009619aee942ee - wwn-0x6d0946606e989c0023009619aee942ee labels: [] masters: [] uuids: [] model: PERC H740P Adp partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '3515088896' sectorsize: '512' serial: 00ee42e9ae19960023009c986e604609 size: 1.64 TB support_discard: '0' vendor: DELL virtual: 1 wwn: '0x6d0946606e989c0023009619aee942ee' skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'sdb', 'value': {'virtual': 1, 'links': {'ids': ['scsi-36d0946606e976b002300bdb2ac66ef62', 'wwn-0x6d0946606e976b002300bdb2ac66ef62'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': 'DELL', 'model': 'PERC H740P Adp', 'sas_address': None, 'sas_device_handle': None, 'serial': '0062ef66acb2bd0023006b976e604609', 'removable': '0', 'support_discard': '0', 'wwn': '0x6d0946606e976b002300bdb2ac66ef62', 'partitions': {}, 'rotational': '1', 'scheduler_mode': 'mq-deadline', 'sectors': '3515088896', 'sectorsize': '512', 'size': '1.64 TB', 'host': 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)', 'holders': []}}) => changed=false item: key: sdb value: holders: [] host: 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)' 
links: ids: - scsi-36d0946606e976b002300bdb2ac66ef62 - wwn-0x6d0946606e976b002300bdb2ac66ef62 labels: [] masters: [] uuids: [] model: PERC H740P Adp partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '3515088896' sectorsize: '512' serial: 0062ef66acb2bd0023006b976e604609 size: 1.64 TB support_discard: '0' vendor: DELL virtual: 1 wwn: '0x6d0946606e976b002300bdb2ac66ef62' skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'sda', 'value': {'virtual': 1, 'links': {'ids': ['scsi-36d0946606e989c0023009611a19a18be', 'wwn-0x6d0946606e989c0023009611a19a18be'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': 'DELL', 'model': 'PERC H740P Adp', 'sas_address': None, 'sas_device_handle': None, 'serial': '00be189aa111960023009c986e604609', 'removable': '0', 'support_discard': '0', 'wwn': '0x6d0946606e989c0023009611a19a18be', 'partitions': {'sda2': {'links': {'ids': ['lvm-pv-uuid-jvt0JQ-utvf-kIKH-Cefz-9c5u-patn-w3Vltf', 'scsi-36d0946606e989c0023009611a19a18be-part2', 'wwn-0x6d0946606e989c0023009611a19a18be-part2'], 'uuids': [], 'labels': [], 'masters': ['dm-0', 'dm-1', 'dm-2']}, 'start': '2099200', 'sectors': '3512989696', 'sectorsize': 512, 'size': '1.64 TB', 'uuid': None, 'holders': ['rhel_e24--h19--740xd-swap', 'rhel_e24--h19--740xd-home', 'rhel_e24--h19--740xd-root']}, 'sda1': {'links': {'ids': ['scsi-36d0946606e989c0023009611a19a18be-part1', 'wwn-0x6d0946606e989c0023009611a19a18be-part1'], 'uuids': ['3a8cc274-3ada-481f-bea6-f3f01f501fb1'], 'labels': [], 'masters': []}, 'start': '2048', 'sectors': '2097152', 'sectorsize': 512, 'size': '1.00 GB', 'uuid': '3a8cc274-3ada-481f-bea6-f3f01f501fb1', 'holders': []}}, 'rotational': '1', 'scheduler_mode': 'mq-deadline', 'sectors': '3515088896', 'sectorsize': '512', 'size': '1.64 TB', 'host': 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)', 'holders': []}}) => changed=false item: key: sda value: holders: [] host: 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)' links: ids: - scsi-36d0946606e989c0023009611a19a18be - wwn-0x6d0946606e989c0023009611a19a18be labels: [] masters: [] uuids: [] model: PERC H740P Adp partitions: sda1: holders: [] links: ids: - scsi-36d0946606e989c0023009611a19a18be-part1 - wwn-0x6d0946606e989c0023009611a19a18be-part1 labels: [] masters: [] uuids: - 3a8cc274-3ada-481f-bea6-f3f01f501fb1 sectors: '2097152' sectorsize: 512 size: 1.00 GB start: '2048' uuid: 3a8cc274-3ada-481f-bea6-f3f01f501fb1 sda2: holders: - rhel_e24--h19--740xd-swap - rhel_e24--h19--740xd-home - rhel_e24--h19--740xd-root links: ids: - lvm-pv-uuid-jvt0JQ-utvf-kIKH-Cefz-9c5u-patn-w3Vltf - scsi-36d0946606e989c0023009611a19a18be-part2 - wwn-0x6d0946606e989c0023009611a19a18be-part2 labels: [] masters: - dm-0 - dm-1 - dm-2 uuids: [] sectors: '3512989696' sectorsize: 512 size: 1.64 TB start: '2099200' uuid: null removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '3515088896' sectorsize: '512' serial: 00be189aa111960023009c986e604609 size: 1.64 TB support_discard: '0' vendor: DELL virtual: 1 wwn: '0x6d0946606e989c0023009611a19a18be' skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'dm-2', 'value': {'virtual': 1, 'links': {'ids': ['dm-name-rhel_e24--h21--740xd-home', 'dm-uuid-LVM-Kdh47jCVeeN7lqw8LX3NwWRQ0WWUlKBsrnOWeD1fmRAePILdm4KQTgVOKWf9sGUD'], 
'uuids': ['bf61e279-1d41-4268-aa66-7ec5fbfcb993'], 'labels': [], 'masters': []}, 'vendor': None, 'model': None, 'sas_address': None, 'sas_device_handle': None, 'serial': '000993b7a5adbd0023006b976e604609', 'removable': '0', 'support_discard': '0', 'partitions': {}, 'rotational': '1', 'scheduler_mode': '', 'sectors': '3399737344', 'sectorsize': '512', 'size': '1.58 TB', 'host': '', 'holders': []}}) => changed=false item: key: dm-2 value: holders: [] host: '' links: ids: - dm-name-rhel_e24--h21--740xd-home - dm-uuid-LVM-Kdh47jCVeeN7lqw8LX3NwWRQ0WWUlKBsrnOWeD1fmRAePILdm4KQTgVOKWf9sGUD labels: [] masters: [] uuids: - bf61e279-1d41-4268-aa66-7ec5fbfcb993 model: null partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: '' sectors: '3399737344' sectorsize: '512' serial: 000993b7a5adbd0023006b976e604609 size: 1.58 TB support_discard: '0' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'sdh', 'value': {'virtual': 1, 'links': {'ids': ['scsi-36d0946606e989c0023009630d03ef964', 'wwn-0x6d0946606e989c0023009630d03ef964'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': 'DELL', 'model': 'PERC H740P Adp', 'sas_address': None, 'sas_device_handle': None, 'serial': '0064f93ed030960023009c986e604609', 'removable': '0', 'support_discard': '0', 'wwn': '0x6d0946606e989c0023009630d03ef964', 'partitions': {}, 'rotational': '1', 'scheduler_mode': 'mq-deadline', 'sectors': '3515088896', 'sectorsize': '512', 'size': '1.64 TB', 'host': 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)', 'holders': []}}) => changed=false item: key: sdh value: holders: [] host: 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)' links: ids: - scsi-36d0946606e989c0023009630d03ef964 - wwn-0x6d0946606e989c0023009630d03ef964 labels: [] masters: [] uuids: [] model: PERC H740P Adp partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '3515088896' sectorsize: '512' serial: 0064f93ed030960023009c986e604609 size: 1.64 TB support_discard: '0' vendor: DELL virtual: 1 wwn: '0x6d0946606e989c0023009630d03ef964' skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'sdg', 'value': {'virtual': 1, 'links': {'ids': ['scsi-36d0946606e976b002300bdc8ce69cb67', 'wwn-0x6d0946606e976b002300bdc8ce69cb67'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': 'DELL', 'model': 'PERC H740P Adp', 'sas_address': None, 'sas_device_handle': None, 'serial': '0067cb69cec8bd0023006b976e604609', 'removable': '0', 'support_discard': '0', 'wwn': '0x6d0946606e976b002300bdc8ce69cb67', 'partitions': {}, 'rotational': '1', 'scheduler_mode': 'mq-deadline', 'sectors': '3515088896', 'sectorsize': '512', 'size': '1.64 TB', 'host': 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)', 'holders': []}}) => changed=false item: key: sdg value: holders: [] host: 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)' links: ids: - scsi-36d0946606e976b002300bdc8ce69cb67 - wwn-0x6d0946606e976b002300bdc8ce69cb67 labels: [] masters: [] uuids: [] model: PERC H740P Adp partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '3515088896' sectorsize: '512' serial: 0067cb69cec8bd0023006b976e604609 size: 1.64 TB support_discard: '0' 
vendor: DELL virtual: 1 wwn: '0x6d0946606e976b002300bdc8ce69cb67' skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'dm-0', 'value': {'virtual': 1, 'links': {'ids': ['dm-name-rhel_e24--h21--740xd-root', 'dm-uuid-LVM-Kdh47jCVeeN7lqw8LX3NwWRQ0WWUlKBsJfIIs1HKN6CelNitmGKAjoLve6mUwcnM'], 'uuids': ['b0691d78-8250-4851-8bf4-0b591722dd1a'], 'labels': [], 'masters': []}, 'vendor': None, 'model': None, 'sas_address': None, 'sas_device_handle': None, 'serial': '000993b7a5adbd0023006b976e604609', 'removable': '0', 'support_discard': '0', 'partitions': {}, 'rotational': '1', 'scheduler_mode': '', 'sectors': '104857600', 'sectorsize': '512', 'size': '50.00 GB', 'host': '', 'holders': []}}) => changed=false item: key: dm-0 value: holders: [] host: '' links: ids: - dm-name-rhel_e24--h21--740xd-root - dm-uuid-LVM-Kdh47jCVeeN7lqw8LX3NwWRQ0WWUlKBsJfIIs1HKN6CelNitmGKAjoLve6mUwcnM labels: [] masters: [] uuids: - b0691d78-8250-4851-8bf4-0b591722dd1a model: null partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: '' sectors: '104857600' sectorsize: '512' serial: 000993b7a5adbd0023006b976e604609 size: 50.00 GB support_discard: '0' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'sde', 'value': {'virtual': 1, 'links': {'ids': ['scsi-36d0946606e976b002300bdbfc0735ce5', 'wwn-0x6d0946606e976b002300bdbfc0735ce5'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': 'DELL', 'model': 'PERC H740P Adp', 'sas_address': None, 'sas_device_handle': None, 'serial': '00e55c73c0bfbd0023006b976e604609', 'removable': '0', 'support_discard': '0', 'wwn': '0x6d0946606e976b002300bdbfc0735ce5', 'partitions': {}, 'rotational': '1', 'scheduler_mode': 'mq-deadline', 'sectors': '3515088896', 'sectorsize': '512', 'size': '1.64 TB', 'host': 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)', 'holders': []}}) => changed=false item: key: sde value: holders: [] host: 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)' links: ids: - scsi-36d0946606e976b002300bdbfc0735ce5 - wwn-0x6d0946606e976b002300bdbfc0735ce5 labels: [] masters: [] uuids: [] model: PERC H740P Adp partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '3515088896' sectorsize: '512' serial: 00e55c73c0bfbd0023006b976e604609 size: 1.64 TB support_discard: '0' vendor: DELL virtual: 1 wwn: '0x6d0946606e976b002300bdbfc0735ce5' skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'sdc', 'value': {'virtual': 1, 'links': {'ids': ['scsi-36d0946606e976b002300bdb6b2f2cab2', 'wwn-0x6d0946606e976b002300bdb6b2f2cab2'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': 'DELL', 'model': 'PERC H740P Adp', 'sas_address': None, 'sas_device_handle': None, 'serial': '00b2caf2b2b6bd0023006b976e604609', 'removable': '0', 'support_discard': '0', 'wwn': '0x6d0946606e976b002300bdb6b2f2cab2', 'partitions': {}, 'rotational': '1', 'scheduler_mode': 'mq-deadline', 'sectors': '3515088896', 'sectorsize': '512', 'size': '1.64 TB', 'host': 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)', 'holders': []}}) => changed=false item: key: sdc value: holders: [] host: 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)' links: ids: - 
scsi-36d0946606e976b002300bdb6b2f2cab2 - wwn-0x6d0946606e976b002300bdb6b2f2cab2 labels: [] masters: [] uuids: [] model: PERC H740P Adp partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '3515088896' sectorsize: '512' serial: 00b2caf2b2b6bd0023006b976e604609 size: 1.64 TB support_discard: '0' vendor: DELL virtual: 1 wwn: '0x6d0946606e976b002300bdb6b2f2cab2' skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'sda', 'value': {'virtual': 1, 'links': {'ids': ['scsi-36d0946606e976b002300bdada5b79309', 'wwn-0x6d0946606e976b002300bdada5b79309'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': 'DELL', 'model': 'PERC H740P Adp', 'sas_address': None, 'sas_device_handle': None, 'serial': '000993b7a5adbd0023006b976e604609', 'removable': '0', 'support_discard': '0', 'wwn': '0x6d0946606e976b002300bdada5b79309', 'partitions': {'sda2': {'links': {'ids': ['lvm-pv-uuid-QML8by-iIcB-hlXS-3b9F-Q1Fr-hRbi-EcH2Ik', 'scsi-36d0946606e976b002300bdada5b79309-part2', 'wwn-0x6d0946606e976b002300bdada5b79309-part2'], 'uuids': [], 'labels': [], 'masters': ['dm-0', 'dm-1', 'dm-2']}, 'start': '2099200', 'sectors': '3512989696', 'sectorsize': 512, 'size': '1.64 TB', 'uuid': None, 'holders': ['rhel_e24--h21--740xd-swap', 'rhel_e24--h21--740xd-home', 'rhel_e24--h21--740xd-root']}, 'sda1': {'links': {'ids': ['scsi-36d0946606e976b002300bdada5b79309-part1', 'wwn-0x6d0946606e976b002300bdada5b79309-part1'], 'uuids': ['961001e7-6ba8-4240-9cd5-34708069f5dd'], 'labels': [], 'masters': []}, 'start': '2048', 'sectors': '2097152', 'sectorsize': 512, 'size': '1.00 GB', 'uuid': '961001e7-6ba8-4240-9cd5-34708069f5dd', 'holders': []}}, 'rotational': '1', 'scheduler_mode': 'mq-deadline', 'sectors': '3515088896', 'sectorsize': '512', 'size': '1.64 TB', 'host': 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)', 'holders': []}}) => changed=false item: key: sda value: holders: [] host: 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)' links: ids: - scsi-36d0946606e976b002300bdada5b79309 - wwn-0x6d0946606e976b002300bdada5b79309 labels: [] masters: [] uuids: [] model: PERC H740P Adp partitions: sda1: holders: [] links: ids: - scsi-36d0946606e976b002300bdada5b79309-part1 - wwn-0x6d0946606e976b002300bdada5b79309-part1 labels: [] masters: [] uuids: - 961001e7-6ba8-4240-9cd5-34708069f5dd sectors: '2097152' sectorsize: 512 size: 1.00 GB start: '2048' uuid: 961001e7-6ba8-4240-9cd5-34708069f5dd sda2: holders: - rhel_e24--h21--740xd-swap - rhel_e24--h21--740xd-home - rhel_e24--h21--740xd-root links: ids: - lvm-pv-uuid-QML8by-iIcB-hlXS-3b9F-Q1Fr-hRbi-EcH2Ik - scsi-36d0946606e976b002300bdada5b79309-part2 - wwn-0x6d0946606e976b002300bdada5b79309-part2 labels: [] masters: - dm-0 - dm-1 - dm-2 uuids: [] sectors: '3512989696' sectorsize: 512 size: 1.64 TB start: '2099200' uuid: null removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '3515088896' sectorsize: '512' serial: 000993b7a5adbd0023006b976e604609 size: 1.64 TB support_discard: '0' vendor: DELL virtual: 1 wwn: '0x6d0946606e976b002300bdada5b79309' skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'sdh', 'value': {'virtual': 1, 'links': {'ids': ['scsi-36d0946606e976b002300bdcdd594a036', 'wwn-0x6d0946606e976b002300bdcdd594a036'], 'uuids': [], 'labels': [], 'masters': []}, 
'vendor': 'DELL', 'model': 'PERC H740P Adp', 'sas_address': None, 'sas_device_handle': None, 'serial': '0036a094d5cdbd0023006b976e604609', 'removable': '0', 'support_discard': '0', 'wwn': '0x6d0946606e976b002300bdcdd594a036', 'partitions': {}, 'rotational': '1', 'scheduler_mode': 'mq-deadline', 'sectors': '3515088896', 'sectorsize': '512', 'size': '1.64 TB', 'host': 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)', 'holders': []}}) => changed=false item: key: sdh value: holders: [] host: 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)' links: ids: - scsi-36d0946606e976b002300bdcdd594a036 - wwn-0x6d0946606e976b002300bdcdd594a036 labels: [] masters: [] uuids: [] model: PERC H740P Adp partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '3515088896' sectorsize: '512' serial: 0036a094d5cdbd0023006b976e604609 size: 1.64 TB support_discard: '0' vendor: DELL virtual: 1 wwn: '0x6d0946606e976b002300bdcdd594a036' skip_reason: Conditional result was False TASK [ceph-facts : set_fact ceph_uid for debian based system - non container] ******************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:189 Wednesday 17 April 2019 11:25:24 +0000 (0:00:00.177) 0:01:51.327 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : set_fact ceph_uid for red hat or suse based system - non container] ********************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:196 Wednesday 17 April 2019 11:25:25 +0000 (0:00:00.090) 0:01:51.418 ******* ok: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ceph_uid: 167 ok: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ceph_uid: 167 ok: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ceph_uid: 167 TASK [ceph-facts : set_fact ceph_uid for debian based system - container] *********************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:203 Wednesday 17 April 2019 11:25:25 +0000 (0:00:00.107) 0:01:51.525 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : set_fact ceph_uid for red hat based system - container] ********************************************************************************************************************************************************************************** task path: 
/usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:210 Wednesday 17 April 2019 11:25:25 +0000 (0:00:00.086) 0:01:51.612 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : set_fact ceph_uid for red hat] *********************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:218 Wednesday 17 April 2019 11:25:25 +0000 (0:00:00.085) 0:01:51.698 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : set_fact rgw_hostname] ******************************************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:225 Wednesday 17 April 2019 11:25:25 +0000 (0:00:00.090) 0:01:51.788 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : set_fact osd_pool_default_pg_num] ******************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:240 Wednesday 17 April 2019 11:25:25 +0000 (0:00:00.089) 0:01:51.878 ******* ok: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: osd_pool_default_pg_num: '8' ok: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: osd_pool_default_pg_num: '8' ok: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: osd_pool_default_pg_num: '8' TASK [ceph-facts : set_fact osd_pool_default_size] ********************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:244 Wednesday 17 April 2019 11:25:25 +0000 (0:00:00.106) 0:01:51.985 ******* ok: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: osd_pool_default_size: '3' ok: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: osd_pool_default_size: '3' ok: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: osd_pool_default_size: '3' TASK [ceph-facts : set_fact osd_pool_default_min_size] 
****************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:248 Wednesday 17 April 2019 11:25:25 +0000 (0:00:00.106) 0:01:52.091 ******* ok: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: osd_pool_default_min_size: '0' ok: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: osd_pool_default_min_size: '0' ok: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: osd_pool_default_min_size: '0' TASK [ceph-facts : check if the ceph conf exists] *********************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:252 Wednesday 17 April 2019 11:25:25 +0000 (0:00:00.157) 0:01:52.249 ******* ok: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false stat: atime: 1555500304.231627 attr_flags: '' attributes: [] block_size: 4096 blocks: 8 charset: us-ascii checksum: 343501dbce18de60c572121f8ed6af687ab69c81 ctime: 1555500291.8891847 dev: 64768 device_type: 0 executable: false exists: true gid: 167 gr_name: ceph inode: 117518851 isblk: false ischr: false isdir: false isfifo: false isgid: false islnk: false isreg: true issock: false isuid: false mimetype: text/plain mode: '0644' mtime: 1555500291.702178 nlink: 1 path: /etc/ceph/ceph.conf pw_name: ceph readable: true rgrp: true roth: true rusr: true size: 747 uid: 167 version: '835175338' wgrp: false woth: false writeable: true wusr: true xgrp: false xoth: false xusr: false ok: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false stat: atime: 1555500304.2459917 attr_flags: '' attributes: [] block_size: 4096 blocks: 8 charset: us-ascii checksum: 343501dbce18de60c572121f8ed6af687ab69c81 ctime: 1555500291.886564 dev: 64768 device_type: 0 executable: false exists: true gid: 167 gr_name: ceph inode: 41982465 isblk: false ischr: false isdir: false isfifo: false isgid: false islnk: false isreg: true issock: false isuid: false mimetype: text/plain mode: '0644' mtime: 1555500291.7515593 nlink: 1 path: /etc/ceph/ceph.conf pw_name: ceph readable: true rgrp: true roth: true rusr: true size: 747 uid: 167 version: '3598395740' wgrp: false woth: false writeable: true wusr: true xgrp: false xoth: false xusr: false ok: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false stat: atime: 1555500304.2801602 attr_flags: '' attributes: [] block_size: 4096 blocks: 8 charset: us-ascii checksum: 343501dbce18de60c572121f8ed6af687ab69c81 ctime: 1555500291.8907492 dev: 64768 device_type: 0 executable: false exists: true gid: 167 gr_name: ceph inode: 41974826 isblk: false ischr: false isdir: false isfifo: false isgid: false islnk: false isreg: true issock: false isuid: false mimetype: text/plain mode: '0644' mtime: 1555500291.7767456 nlink: 1 path: /etc/ceph/ceph.conf pw_name: ceph readable: true rgrp: true roth: true rusr: true size: 747 uid: 167 version: '3468493449' wgrp: false woth: false writeable: true wusr: true xgrp: false xoth: false xusr: false TASK [ceph-facts : get default crush rule value from ceph configuration] 
************************************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:257 Wednesday 17 April 2019 11:25:26 +0000 (0:00:00.290) 0:01:52.540 ******* ok: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false cmd: - grep - osd pool default crush rule - /etc/ceph/ceph.conf delta: '0:00:00.001728' end: '2019-04-17 11:25:26.322137' failed_when_result: false rc: 0 start: '2019-04-17 11:25:26.320409' stderr: '' stderr_lines: [] stdout: osd pool default crush rule = -1 stdout_lines: ok: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false cmd: - grep - osd pool default crush rule - /etc/ceph/ceph.conf delta: '0:00:00.001621' end: '2019-04-17 11:25:26.335081' failed_when_result: false rc: 0 start: '2019-04-17 11:25:26.333460' stderr: '' stderr_lines: [] stdout: osd pool default crush rule = -1 stdout_lines: ok: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false cmd: - grep - osd pool default crush rule - /etc/ceph/ceph.conf delta: '0:00:00.001661' end: '2019-04-17 11:25:26.371605' failed_when_result: false rc: 0 start: '2019-04-17 11:25:26.369944' stderr: '' stderr_lines: [] stdout: osd pool default crush rule = -1 stdout_lines: TASK [ceph-facts : set_fact osd_pool_default_crush_rule] **************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:264 Wednesday 17 April 2019 11:25:26 +0000 (0:00:00.283) 0:01:52.823 ******* ok: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: osd_pool_default_crush_rule: '-1' ok: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: osd_pool_default_crush_rule: '-1' ok: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: osd_pool_default_crush_rule: '-1' TASK [ceph-facts : set_fact _monitor_address to monitor_address_block] ************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/set_monitor_address.yml:2 Wednesday 17 April 2019 11:25:26 +0000 (0:00:00.108) 0:01:52.932 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h17-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h17-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h19-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h19-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h21-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h21-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h17-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h17-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h19-740xd.alias.bos.scalelab.redhat.com) => changed=false item: 
e24-h19-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h21-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h21-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h17-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h17-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h19-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h19-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h21-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h21-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False TASK [ceph-facts : set_fact _monitor_address to monitor_address] ******************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/set_monitor_address.yml:12 Wednesday 17 April 2019 11:25:26 +0000 (0:00:00.115) 0:01:53.047 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h17-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h17-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h19-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h19-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h21-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h21-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h17-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h17-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h19-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h19-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h21-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h21-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h17-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h17-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h19-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h19-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h21-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h21-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False TASK [ceph-facts : set_fact _monitor_address to monitor_interface - ipv4] 
*********************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/set_monitor_address.yml:22 Wednesday 17 April 2019 11:25:26 +0000 (0:00:00.108) 0:01:53.156 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h17-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h17-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h19-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h19-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h21-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h21-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h17-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h17-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h19-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h19-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h21-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h21-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h17-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h17-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h19-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h19-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h21-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h21-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False TASK [ceph-facts : set_fact _monitor_address to monitor_interface - ipv6] *********************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/set_monitor_address.yml:34 Wednesday 17 April 2019 11:25:26 +0000 (0:00:00.107) 0:01:53.263 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h17-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h17-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h19-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h19-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h21-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h21-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h17-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h17-740xd.alias.bos.scalelab.redhat.com 
skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h19-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h19-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h21-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h21-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h17-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h17-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h19-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h19-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h21-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h21-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False TASK [ceph-facts : set_fact _current_monitor_address] ******************************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/set_monitor_address.yml:46 Wednesday 17 April 2019 11:25:26 +0000 (0:00:00.107) 0:01:53.371 ******* skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'e24-h17-740xd.alias.bos.scalelab.redhat.com', 'addr': '10.1.24.17'}) => changed=false item: addr: 10.1.24.17 name: e24-h17-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'e24-h17-740xd.alias.bos.scalelab.redhat.com', 'addr': '10.1.24.17'}) => changed=false item: addr: 10.1.24.17 name: e24-h17-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'e24-h19-740xd.alias.bos.scalelab.redhat.com', 'addr': '10.1.24.19'}) => changed=false item: addr: 10.1.24.19 name: e24-h19-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False ok: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'e24-h17-740xd.alias.bos.scalelab.redhat.com', 'addr': '10.1.24.17'}) => changed=false ansible_facts: _current_monitor_address: 10.1.24.17 item: addr: 10.1.24.17 name: e24-h17-740xd.alias.bos.scalelab.redhat.com skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'e24-h19-740xd.alias.bos.scalelab.redhat.com', 'addr': '10.1.24.19'}) => changed=false item: addr: 10.1.24.19 name: e24-h19-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'e24-h21-740xd.alias.bos.scalelab.redhat.com', 'addr': '10.1.24.21'}) => changed=false item: addr: 10.1.24.21 name: e24-h21-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False ok: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'e24-h19-740xd.alias.bos.scalelab.redhat.com', 'addr': '10.1.24.19'}) => changed=false ansible_facts: _current_monitor_address: 10.1.24.19 item: addr: 10.1.24.19 name: e24-h19-740xd.alias.bos.scalelab.redhat.com skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => 
(item={'name': 'e24-h21-740xd.alias.bos.scalelab.redhat.com', 'addr': '10.1.24.21'}) => changed=false item: addr: 10.1.24.21 name: e24-h21-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False ok: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'e24-h21-740xd.alias.bos.scalelab.redhat.com', 'addr': '10.1.24.21'}) => changed=false ansible_facts: _current_monitor_address: 10.1.24.21 item: addr: 10.1.24.21 name: e24-h21-740xd.alias.bos.scalelab.redhat.com TASK [ceph-facts : set_fact _radosgw_address to radosgw_address_block] ************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/set_radosgw_address.yml:2 Wednesday 17 April 2019 11:25:27 +0000 (0:00:00.170) 0:01:53.542 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : set_fact _radosgw_address to radosgw_address] ******************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/set_radosgw_address.yml:9 Wednesday 17 April 2019 11:25:27 +0000 (0:00:00.091) 0:01:53.633 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : set_fact _interface] ********************************************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/set_radosgw_address.yml:17 Wednesday 17 April 2019 11:25:27 +0000 (0:00:00.090) 0:01:53.723 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : set_fact _radosgw_address to radosgw_interface - ipv4] *********************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/set_radosgw_address.yml:21 Wednesday 17 April 2019 11:25:27 +0000 (0:00:00.089) 0:01:53.812 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : set_fact 
_radosgw_address to radosgw_interface - ipv6] *********************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/set_radosgw_address.yml:27 Wednesday 17 April 2019 11:25:27 +0000 (0:00:00.087) 0:01:53.900 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : set_fact rgw_instances] ****************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:277 Wednesday 17 April 2019 11:25:27 +0000 (0:00:00.089) 0:01:53.990 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => (item=0) => changed=false item: '0' skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => (item=0) => changed=false item: '0' skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => (item=0) => changed=false item: '0' skip_reason: Conditional result was False TASK [ceph-facts : set ntp service name for Debian family] ************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:286 Wednesday 17 April 2019 11:25:27 +0000 (0:00:00.183) 0:01:54.173 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : set ntp service name for Red Hat family] ************************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:291 Wednesday 17 April 2019 11:25:27 +0000 (0:00:00.089) 0:01:54.263 ******* ok: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ntp_service_name: ntpd ok: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ntp_service_name: ntpd ok: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ntp_service_name: ntpd TASK [ceph-handler : include check_running_containers.yml] ************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_running_cluster.yml:2 Wednesday 17 April 2019 11:25:27 +0000 (0:00:00.105) 0:01:54.368 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: 
[e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : include check_socket_non_container.yml] ************************************************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_running_cluster.yml:7 Wednesday 17 April 2019 11:25:28 +0000 (0:00:00.086) 0:01:54.454 ******* included: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml for e24-h17-740xd.alias.bos.scalelab.redhat.com, e24-h19-740xd.alias.bos.scalelab.redhat.com, e24-h21-740xd.alias.bos.scalelab.redhat.com TASK [ceph-handler : check for a ceph mon socket] *********************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:2 Wednesday 17 April 2019 11:25:28 +0000 (0:00:00.169) 0:01:54.624 ******* ok: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false cmd: stat --printf=%n /var/run/ceph/ceph-mon*.asok delta: '0:00:00.002521' end: '2019-04-17 11:25:28.352357' failed_when_result: false rc: 0 start: '2019-04-17 11:25:28.349836' stderr: '' stderr_lines: [] stdout: /var/run/ceph/ceph-mon.e24-h17-740xd.asok stdout_lines: ok: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false cmd: stat --printf=%n /var/run/ceph/ceph-mon*.asok delta: '0:00:00.002224' end: '2019-04-17 11:25:28.374731' failed_when_result: false rc: 0 start: '2019-04-17 11:25:28.372507' stderr: '' stderr_lines: [] stdout: /var/run/ceph/ceph-mon.e24-h19-740xd.asok stdout_lines: ok: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false cmd: stat --printf=%n /var/run/ceph/ceph-mon*.asok delta: '0:00:01.003580' end: '2019-04-17 11:25:29.407544' failed_when_result: false rc: 0 start: '2019-04-17 11:25:28.403964' stderr: '' stderr_lines: [] stdout: /var/run/ceph/ceph-mon.e24-h21-740xd.asok stdout_lines: TASK [ceph-handler : check if the ceph mon socket is in-use] ************************************************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:11 Wednesday 17 April 2019 11:25:29 +0000 (0:00:01.235) 0:01:55.859 ******* ok: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false cmd: - fuser - --silent - /var/run/ceph/ceph-mon.e24-h17-740xd.asok delta: '0:00:00.017744' end: '2019-04-17 11:25:29.603473' failed_when_result: false rc: 0 start: '2019-04-17 11:25:29.585729' stderr: '' stderr_lines: [] stdout: '' stdout_lines: ok: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false cmd: - fuser - --silent - /var/run/ceph/ceph-mon.e24-h19-740xd.asok delta: '0:00:00.018098' end: '2019-04-17 11:25:29.627842' failed_when_result: false rc: 0 start: '2019-04-17 11:25:29.609744' stderr: '' stderr_lines: [] stdout: '' stdout_lines: ok: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false cmd: - fuser - --silent - /var/run/ceph/ceph-mon.e24-h21-740xd.asok delta: '0:00:00.017315' end: '2019-04-17 11:25:29.660643' failed_when_result: 
false rc: 0 start: '2019-04-17 11:25:29.643328' stderr: '' stderr_lines: [] stdout: '' stdout_lines: TASK [ceph-handler : remove ceph mon socket if exists and not used by a process] **************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:21 Wednesday 17 April 2019 11:25:29 +0000 (0:00:00.251) 0:01:56.111 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : check for a ceph osd socket] *********************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:30 Wednesday 17 April 2019 11:25:29 +0000 (0:00:00.092) 0:01:56.204 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : check if the ceph osd socket is in-use] ************************************************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:40 Wednesday 17 April 2019 11:25:29 +0000 (0:00:00.089) 0:01:56.293 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : remove ceph osd socket if exists and not used by a process] **************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:50 Wednesday 17 April 2019 11:25:29 +0000 (0:00:00.087) 0:01:56.381 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : check for a ceph mds socket] *********************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:59 Wednesday 17 April 2019 11:25:30 +0000 
(0:00:00.091) 0:01:56.473 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : check if the ceph mds socket is in-use] ************************************************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:69 Wednesday 17 April 2019 11:25:30 +0000 (0:00:00.089) 0:01:56.563 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : remove ceph mds socket if exists and not used by a process] **************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:79 Wednesday 17 April 2019 11:25:30 +0000 (0:00:00.087) 0:01:56.650 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : check for a ceph rgw socket] *********************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:88 Wednesday 17 April 2019 11:25:30 +0000 (0:00:00.088) 0:01:56.739 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : check if the ceph rgw socket is in-use] ************************************************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:98 Wednesday 17 April 2019 11:25:30 +0000 (0:00:00.089) 0:01:56.828 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : remove ceph rgw socket if exists and not used by a process] 
**************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:108 Wednesday 17 April 2019 11:25:30 +0000 (0:00:00.091) 0:01:56.920 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : check for a ceph mgr socket] *********************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:117 Wednesday 17 April 2019 11:25:30 +0000 (0:00:00.089) 0:01:57.009 ******* ok: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false cmd: stat --printf=%n /var/run/ceph/ceph-mgr*.asok delta: '0:00:00.002278' end: '2019-04-17 11:25:30.737732' failed_when_result: false msg: non-zero return code rc: 1 start: '2019-04-17 11:25:30.735454' stderr: 'stat: cannot stat ''/var/run/ceph/ceph-mgr*.asok'': No such file or directory' stderr_lines: - 'stat: cannot stat ''/var/run/ceph/ceph-mgr*.asok'': No such file or directory' stdout: '' stdout_lines: ok: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false cmd: stat --printf=%n /var/run/ceph/ceph-mgr*.asok delta: '0:00:00.002258' end: '2019-04-17 11:25:30.761677' failed_when_result: false msg: non-zero return code rc: 1 start: '2019-04-17 11:25:30.759419' stderr: 'stat: cannot stat ''/var/run/ceph/ceph-mgr*.asok'': No such file or directory' stderr_lines: - 'stat: cannot stat ''/var/run/ceph/ceph-mgr*.asok'': No such file or directory' stdout: '' stdout_lines: ok: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false cmd: stat --printf=%n /var/run/ceph/ceph-mgr*.asok delta: '0:00:00.002375' end: '2019-04-17 11:25:30.794432' failed_when_result: false msg: non-zero return code rc: 1 start: '2019-04-17 11:25:30.792057' stderr: 'stat: cannot stat ''/var/run/ceph/ceph-mgr*.asok'': No such file or directory' stderr_lines: - 'stat: cannot stat ''/var/run/ceph/ceph-mgr*.asok'': No such file or directory' stdout: '' stdout_lines: TASK [ceph-handler : check if the ceph mgr socket is in-use] ************************************************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:127 Wednesday 17 April 2019 11:25:30 +0000 (0:00:00.236) 0:01:57.246 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : remove ceph mgr socket if exists and not used by a process] 
**************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:137 Wednesday 17 April 2019 11:25:30 +0000 (0:00:00.091) 0:01:57.337 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : check for a ceph rbd mirror socket] **************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:146 Wednesday 17 April 2019 11:25:31 +0000 (0:00:00.099) 0:01:57.437 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : check if the ceph rbd mirror socket is in-use] ***************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:156 Wednesday 17 April 2019 11:25:31 +0000 (0:00:00.089) 0:01:57.526 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : remove ceph rbd mirror socket if exists and not used by a process] ********************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:166 Wednesday 17 April 2019 11:25:31 +0000 (0:00:00.089) 0:01:57.615 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : check for a ceph nfs ganesha socket] *************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:175 Wednesday 17 April 2019 11:25:31 +0000 (0:00:00.088) 0:01:57.704 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: 
[e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : check if the ceph nfs ganesha socket is in-use] **************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:184 Wednesday 17 April 2019 11:25:31 +0000 (0:00:00.089) 0:01:57.793 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : remove ceph nfs ganesha socket if exists and not used by a process] ******************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:194 Wednesday 17 April 2019 11:25:31 +0000 (0:00:00.091) 0:01:57.885 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : check for a tcmu-runner] *************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:203 Wednesday 17 April 2019 11:25:31 +0000 (0:00:00.089) 0:01:57.975 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : check for a rbd-target-api] ************************************************************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:212 Wednesday 17 April 2019 11:25:31 +0000 (0:00:00.088) 0:01:58.063 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : check for a rbd-target-gw] ************************************************************************************************************************************************************************************************************* task path: 
/usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:221 Wednesday 17 April 2019 11:25:31 +0000 (0:00:00.087) 0:01:58.151 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-common : include_tasks installs/install_on_redhat.yml] ******************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/main.yml:2 Wednesday 17 April 2019 11:25:31 +0000 (0:00:00.087) 0:01:58.238 ******* included: /usr/share/ceph-ansible/roles/ceph-common/tasks/installs/install_on_redhat.yml for e24-h17-740xd.alias.bos.scalelab.redhat.com, e24-h19-740xd.alias.bos.scalelab.redhat.com, e24-h21-740xd.alias.bos.scalelab.redhat.com TASK [ceph-common : include configure_redhat_repository_installation.yml] *********************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/installs/install_on_redhat.yml:2 Wednesday 17 April 2019 11:25:31 +0000 (0:00:00.164) 0:01:58.403 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-common : include configure_redhat_local_installation.yml] **************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/installs/install_on_redhat.yml:7 Wednesday 17 April 2019 11:25:32 +0000 (0:00:00.088) 0:01:58.491 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-common : include install_redhat_packages.yml] **************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/installs/install_on_redhat.yml:12 Wednesday 17 April 2019 11:25:32 +0000 (0:00:00.090) 0:01:58.581 ******* included: /usr/share/ceph-ansible/roles/ceph-common/tasks/installs/install_redhat_packages.yml for e24-h17-740xd.alias.bos.scalelab.redhat.com, e24-h19-740xd.alias.bos.scalelab.redhat.com, e24-h21-740xd.alias.bos.scalelab.redhat.com TASK [ceph-common : install redhat dependencies] 
************************************************************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/installs/install_redhat_packages.yml:2 Wednesday 17 April 2019 11:25:32 +0000 (0:00:00.157) 0:01:58.739 ******* ok: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false attempts: 1 msg: Nothing to do rc: 0 results: - 'Installed: python3-pycurl' - 'Installed: python3-setuptools' ok: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false attempts: 1 msg: Nothing to do rc: 0 results: - 'Installed: python3-pycurl' - 'Installed: python3-setuptools' ok: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false attempts: 1 msg: Nothing to do rc: 0 results: - 'Installed: python3-pycurl' - 'Installed: python3-setuptools' TASK [ceph-common : install centos dependencies] ************************************************************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/installs/install_redhat_packages.yml:11 Wednesday 17 April 2019 11:25:33 +0000 (0:00:00.946) 0:01:59.685 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-common : install redhat ceph packages] *********************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/installs/install_redhat_packages.yml:20 Wednesday 17 April 2019 11:25:33 +0000 (0:00:00.090) 0:01:59.776 ******* ok: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false attempts: 1 msg: Nothing to do rc: 0 results: - 'Installed: ceph-common' - 'Installed: ceph-mon' - 'Installed: ceph-fuse' - 'Installed: ceph-base' ok: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false attempts: 1 msg: Nothing to do rc: 0 results: - 'Installed: ceph-common' - 'Installed: ceph-mon' - 'Installed: ceph-fuse' - 'Installed: ceph-base' ok: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false attempts: 1 msg: Nothing to do rc: 0 results: - 'Installed: ceph-common' - 'Installed: ceph-mon' - 'Installed: ceph-fuse' - 'Installed: ceph-base' TASK [ceph-common : include_tasks installs/install_on_suse.yml] ********************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/main.yml:8 Wednesday 17 April 2019 11:25:34 +0000 (0:00:00.960) 0:02:00.736 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-common : include installs/install_on_debian.yml] 
************************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/main.yml:14 Wednesday 17 April 2019 11:25:34 +0000 (0:00:00.092) 0:02:00.829 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-common : include_tasks installs/install_on_clear.yml] ******************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/main.yml:21 Wednesday 17 April 2019 11:25:34 +0000 (0:00:00.089) 0:02:00.918 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-common : get ceph version] *********************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/main.yml:27 Wednesday 17 April 2019 11:25:34 +0000 (0:00:00.088) 0:02:01.007 ******* ok: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false cmd: - ceph - --version delta: '0:00:00.072632' end: '2019-04-17 11:25:34.803691' rc: 0 start: '2019-04-17 11:25:34.731059' stderr: '' stderr_lines: [] stdout: ceph version 14.2.0-142-g2f9c072 (2f9c0720b5aed4c9e25e8b050e71856df0a986ad) nautilus (stable) stdout_lines: ok: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false cmd: - ceph - --version delta: '0:00:00.072574' end: '2019-04-17 11:25:34.828348' rc: 0 start: '2019-04-17 11:25:34.755774' stderr: '' stderr_lines: [] stdout: ceph version 14.2.0-142-g2f9c072 (2f9c0720b5aed4c9e25e8b050e71856df0a986ad) nautilus (stable) stdout_lines: ok: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false cmd: - ceph - --version delta: '0:00:00.075756' end: '2019-04-17 11:25:34.859610' rc: 0 start: '2019-04-17 11:25:34.783854' stderr: '' stderr_lines: [] stdout: ceph version 14.2.0-142-g2f9c072 (2f9c0720b5aed4c9e25e8b050e71856df0a986ad) nautilus (stable) stdout_lines: TASK [ceph-common : set_fact ceph_version] ****************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/main.yml:33 Wednesday 17 April 2019 11:25:34 +0000 (0:00:00.302) 0:02:01.309 ******* ok: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ceph_version: 14.2.0-142-g2f9c072 ok: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ceph_version: 14.2.0-142-g2f9c072 ok: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ceph_version: 
14.2.0-142-g2f9c072 TASK [ceph-common : include release-rhcs.yml] *************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/main.yml:38 Wednesday 17 April 2019 11:25:35 +0000 (0:00:00.125) 0:02:01.435 ******* included: /usr/share/ceph-ansible/roles/ceph-common/tasks/release-rhcs.yml for e24-h17-740xd.alias.bos.scalelab.redhat.com, e24-h19-740xd.alias.bos.scalelab.redhat.com, e24-h21-740xd.alias.bos.scalelab.redhat.com TASK [ceph-common : set_fact ceph_release jewel] ************************************************************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/release-rhcs.yml:2 Wednesday 17 April 2019 11:25:35 +0000 (0:00:00.161) 0:02:01.596 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-common : set_fact ceph_release kraken] *********************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/release-rhcs.yml:8 Wednesday 17 April 2019 11:25:35 +0000 (0:00:00.087) 0:02:01.684 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-common : set_fact ceph_release luminous] ********************************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/release-rhcs.yml:14 Wednesday 17 April 2019 11:25:35 +0000 (0:00:00.096) 0:02:01.780 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-common : set_fact ceph_release mimic] ************************************************************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/release-rhcs.yml:20 Wednesday 17 April 2019 11:25:35 +0000 (0:00:00.089) 0:02:01.870 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: 
[e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-common : set_fact ceph_release nautilus] ********************************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/release-rhcs.yml:26 Wednesday 17 April 2019 11:25:35 +0000 (0:00:00.089) 0:02:01.959 ******* ok: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ceph_release: nautilus ok: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ceph_release: nautilus ok: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ceph_release: nautilus TASK [ceph-common : set_fact ceph_release - override ceph_release with ceph_stable_release] ***************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/main.yml:47 Wednesday 17 April 2019 11:25:35 +0000 (0:00:00.123) 0:02:02.082 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-common : include create_rbd_client_dir.yml] ****************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/main.yml:56 Wednesday 17 April 2019 11:25:35 +0000 (0:00:00.088) 0:02:02.171 ******* included: /usr/share/ceph-ansible/roles/ceph-common/tasks/create_rbd_client_dir.yml for e24-h17-740xd.alias.bos.scalelab.redhat.com, e24-h19-740xd.alias.bos.scalelab.redhat.com, e24-h21-740xd.alias.bos.scalelab.redhat.com TASK [ceph-common : create rbd client directory] ************************************************************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/create_rbd_client_dir.yml:2 Wednesday 17 April 2019 11:25:35 +0000 (0:00:00.159) 0:02:02.330 ******* ok: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => (item=/var/run/ceph) => changed=false gid: 167 group: ceph item: /var/run/ceph mode: '0770' owner: ceph path: /var/run/ceph size: 60 state: directory uid: 167 ok: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => (item=/var/run/ceph) => changed=false gid: 167 group: ceph item: /var/run/ceph mode: '0770' owner: ceph path: /var/run/ceph size: 60 state: directory uid: 167 ok: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => (item=/var/run/ceph) => changed=false gid: 167 group: ceph item: /var/run/ceph mode: '0770' owner: ceph path: /var/run/ceph size: 60 state: directory uid: 167 ok: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => (item=/var/log/ceph) => changed=false gid: 167 group: ceph item: /var/log/ceph mode: '0770' owner: ceph path: /var/log/ceph size: 78 state: directory uid: 167 ok: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => (item=/var/log/ceph) => changed=false gid: 167 
group: ceph item: /var/log/ceph mode: '0770' owner: ceph path: /var/log/ceph size: 78 state: directory uid: 167 ok: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => (item=/var/log/ceph) => changed=false gid: 167 group: ceph item: /var/log/ceph mode: '0770' owner: ceph path: /var/log/ceph size: 78 state: directory uid: 167 TASK [ceph-common : include configure_cluster_name.yml] ***************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/main.yml:59 Wednesday 17 April 2019 11:25:36 +0000 (0:00:00.414) 0:02:02.744 ******* included: /usr/share/ceph-ansible/roles/ceph-common/tasks/configure_cluster_name.yml for e24-h17-740xd.alias.bos.scalelab.redhat.com, e24-h19-740xd.alias.bos.scalelab.redhat.com, e24-h21-740xd.alias.bos.scalelab.redhat.com TASK [ceph-common : configure cluster name] ***************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/configure_cluster_name.yml:2 Wednesday 17 April 2019 11:25:36 +0000 (0:00:00.162) 0:02:02.907 ******* ok: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false backup: '' msg: '' ok: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false backup: '' msg: '' ok: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false backup: '' msg: '' TASK [ceph-common : check /etc/default/ceph exist] ********************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/configure_cluster_name.yml:24 Wednesday 17 April 2019 11:25:36 +0000 (0:00:00.277) 0:02:03.185 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-common : when /etc/default/ceph is not dir] ****************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/configure_cluster_name.yml:32 Wednesday 17 April 2019 11:25:36 +0000 (0:00:00.089) 0:02:03.274 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-common : when /etc/default/ceph is dir] ********************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/configure_cluster_name.yml:42 Wednesday 17 April 2019 11:25:37 +0000 (0:00:00.190) 0:02:03.465 
******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-common : include configure_memory_allocator.yml] ************************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/main.yml:62 Wednesday 17 April 2019 11:25:37 +0000 (0:00:00.090) 0:02:03.555 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-config : include create_ceph_initial_dirs.yml] *************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-config/tasks/main.yml:2 Wednesday 17 April 2019 11:25:37 +0000 (0:00:00.088) 0:02:03.644 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-config : count number of osds for ceph-disk scenarios] ******************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-config/tasks/main.yml:8 Wednesday 17 April 2019 11:25:37 +0000 (0:00:00.087) 0:02:03.731 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-config : count number of osds for lvm scenario] ************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-config/tasks/main.yml:15 Wednesday 17 April 2019 11:25:37 +0000 (0:00:00.088) 0:02:03.820 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-config : run 'ceph-volume lvm batch --report' to see how many osds are to be created] 
************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-config/tasks/main.yml:22 Wednesday 17 April 2019 11:25:37 +0000 (0:00:00.092) 0:02:03.912 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-config : set_fact num_osds from the output of 'ceph-volume lvm batch --report'] ****************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-config/tasks/main.yml:41 Wednesday 17 April 2019 11:25:37 +0000 (0:00:00.088) 0:02:04.000 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-config : run 'ceph-volume lvm list' to see how many osds have already been created] ************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-config/tasks/main.yml:49 Wednesday 17 April 2019 11:25:37 +0000 (0:00:00.089) 0:02:04.089 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-config : set_fact num_osds from the output of 'ceph-volume lvm list'] **************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-config/tasks/main.yml:62 Wednesday 17 April 2019 11:25:37 +0000 (0:00:00.089) 0:02:04.178 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-config : create ceph conf directory] ************************************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-config/tasks/main.yml:75 Wednesday 17 April 2019 11:25:37 +0000 (0:00:00.087) 0:02:04.266 ******* ok: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false gid: 167 group: ceph mode: '0755' owner: ceph path: /etc/ceph size: 184 state: directory uid: 167 ok: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false gid: 167 group: ceph mode: '0755' owner: ceph 
path: /etc/ceph size: 70 state: directory uid: 167 ok: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false gid: 167 group: ceph mode: '0755' owner: ceph path: /etc/ceph size: 70 state: directory uid: 167 TASK [ceph-config : generate ceph configuration file: ceph.conf] ******************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-config/tasks/main.yml:83 Wednesday 17 April 2019 11:25:38 +0000 (0:00:00.232) 0:02:04.498 ******* ok: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false checksum: 343501dbce18de60c572121f8ed6af687ab69c81 dest: /etc/ceph/ceph.conf gid: 167 group: ceph md5sum: 433a181655153ff4c2cb126d824dc0c8 mode: '0644' owner: ceph size: 747 src: /root/.ansible/tmp/ansible-tmp-1555500338.1136189-229690202896242/source state: file uid: 167 ok: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false checksum: 343501dbce18de60c572121f8ed6af687ab69c81 dest: /etc/ceph/ceph.conf gid: 167 group: ceph md5sum: 433a181655153ff4c2cb126d824dc0c8 mode: '0644' owner: ceph size: 747 src: /root/.ansible/tmp/ansible-tmp-1555500338.1393962-19647200400838/source state: file uid: 167 ok: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false checksum: 343501dbce18de60c572121f8ed6af687ab69c81 dest: /etc/ceph/ceph.conf gid: 167 group: ceph md5sum: 433a181655153ff4c2cb126d824dc0c8 mode: '0644' owner: ceph size: 747 src: /root/.ansible/tmp/ansible-tmp-1555500338.1710885-265072842900963/source state: file uid: 167 TASK [ceph-config : ensure fetch directory exists] ********************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-config/tasks/main.yml:101 Wednesday 17 April 2019 11:25:38 +0000 (0:00:00.536) 0:02:05.034 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-config : generate ceph.conf configuration file locally] ****************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-config/tasks/main.yml:112 Wednesday 17 April 2019 11:25:38 +0000 (0:00:00.043) 0:02:05.078 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-config : create a local fetch directory if it does not exist] ************************************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-config/tasks/main.yml:129 Wednesday 17 April 2019 11:25:38 +0000 (0:00:00.041) 0:02:05.119 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-config : generate ceph.conf configuration file] ************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-config/tasks/main.yml:142 Wednesday 17 April 2019 11:25:38 +0000 (0:00:00.037) 
0:02:05.156 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-mgr : set_fact docker_exec_cmd] ****************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-mgr/tasks/main.yml:2 Wednesday 17 April 2019 11:25:38 +0000 (0:00:00.090) 0:02:05.246 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-mgr : include common.yml] ************************************************************************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-mgr/tasks/main.yml:8 Wednesday 17 April 2019 11:25:38 +0000 (0:00:00.087) 0:02:05.333 ******* included: /usr/share/ceph-ansible/roles/ceph-mgr/tasks/common.yml for e24-h17-740xd.alias.bos.scalelab.redhat.com, e24-h19-740xd.alias.bos.scalelab.redhat.com, e24-h21-740xd.alias.bos.scalelab.redhat.com TASK [ceph-mgr : create mgr directory] ********************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-mgr/tasks/common.yml:2 Wednesday 17 April 2019 11:25:39 +0000 (0:00:00.151) 0:02:05.485 ******* changed: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=true gid: 167 group: ceph mode: '0755' owner: ceph path: /var/lib/ceph/mgr/ceph-e24-h17-740xd size: 6 state: directory uid: 167 changed: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=true gid: 167 group: ceph mode: '0755' owner: ceph path: /var/lib/ceph/mgr/ceph-e24-h19-740xd size: 6 state: directory uid: 167 changed: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=true gid: 167 group: ceph mode: '0755' owner: ceph path: /var/lib/ceph/mgr/ceph-e24-h21-740xd size: 6 state: directory uid: 167 TASK [ceph-mgr : fetch ceph mgr keyring] ******************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-mgr/tasks/common.yml:10 Wednesday 17 April 2019 11:25:39 +0000 (0:00:00.233) 0:02:05.718 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-mgr : copy ceph keyring(s) if needed] 
************************************************************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-mgr/tasks/common.yml:30 Wednesday 17 April 2019 11:25:39 +0000 (0:00:00.091) 0:02:05.810 ******* changed: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => (item={'name': '/etc/ceph/ceph.mgr.e24-h17-740xd.keyring', 'dest': '/var/lib/ceph/mgr/ceph-e24-h17-740xd/keyring', 'copy_key': True}) => changed=true checksum: 38f4fc4c11e15cf458a266ebd44e881678bfb058 dest: /var/lib/ceph/mgr/ceph-e24-h17-740xd/keyring gid: 167 group: ceph item: copy_key: true dest: /var/lib/ceph/mgr/ceph-e24-h17-740xd/keyring name: /etc/ceph/ceph.mgr.e24-h17-740xd.keyring md5sum: 3a6dc048bf82aecb41aa0a86b565804c mode: '0600' owner: ceph size: 144 src: /root/.ansible/tmp/ansible-tmp-1555500339.4349082-31255030006130/source state: file uid: 167 skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => (item={'name': '/etc/ceph/ceph.client.admin.keyring', 'dest': '/etc/ceph/ceph.client.admin.keyring', 'copy_key': False}) => changed=false item: copy_key: false dest: /etc/ceph/ceph.client.admin.keyring name: /etc/ceph/ceph.client.admin.keyring skip_reason: Conditional result was False changed: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => (item={'name': '/etc/ceph/ceph.mgr.e24-h19-740xd.keyring', 'dest': '/var/lib/ceph/mgr/ceph-e24-h19-740xd/keyring', 'copy_key': True}) => changed=true checksum: fbfadb035cd82e04512f2c9e9fb31a5ed63d37a5 dest: /var/lib/ceph/mgr/ceph-e24-h19-740xd/keyring gid: 167 group: ceph item: copy_key: true dest: /var/lib/ceph/mgr/ceph-e24-h19-740xd/keyring name: /etc/ceph/ceph.mgr.e24-h19-740xd.keyring md5sum: 1809c6b8f0cc06083b4ba14685a9e26e mode: '0600' owner: ceph size: 144 src: /root/.ansible/tmp/ansible-tmp-1555500339.4621224-107466391483499/source state: file uid: 167 skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => (item={'name': '/etc/ceph/ceph.client.admin.keyring', 'dest': '/etc/ceph/ceph.client.admin.keyring', 'copy_key': False}) => changed=false item: copy_key: false dest: /etc/ceph/ceph.client.admin.keyring name: /etc/ceph/ceph.client.admin.keyring skip_reason: Conditional result was False changed: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => (item={'name': '/etc/ceph/ceph.mgr.e24-h21-740xd.keyring', 'dest': '/var/lib/ceph/mgr/ceph-e24-h21-740xd/keyring', 'copy_key': True}) => changed=true checksum: b0bb0d2de297162db14f09e1e3018859af14ff02 dest: /var/lib/ceph/mgr/ceph-e24-h21-740xd/keyring gid: 167 group: ceph item: copy_key: true dest: /var/lib/ceph/mgr/ceph-e24-h21-740xd/keyring name: /etc/ceph/ceph.mgr.e24-h21-740xd.keyring md5sum: da1e8b5c5885f90e5aed07322c576371 mode: '0600' owner: ceph size: 144 src: /root/.ansible/tmp/ansible-tmp-1555500339.4936507-213414143090983/source state: file uid: 167 skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => (item={'name': '/etc/ceph/ceph.client.admin.keyring', 'dest': '/etc/ceph/ceph.client.admin.keyring', 'copy_key': False}) => changed=false item: copy_key: false dest: /etc/ceph/ceph.client.admin.keyring name: /etc/ceph/ceph.client.admin.keyring skip_reason: Conditional result was False TASK [ceph-mgr : set mgr key permissions] ******************************************************************************************************************************************************************************************************************* task path: 
/usr/share/ceph-ansible/roles/ceph-mgr/tasks/common.yml:45 Wednesday 17 April 2019 11:25:39 +0000 (0:00:00.476) 0:02:06.286 ******* ok: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false gid: 167 group: ceph mode: '0600' owner: ceph path: /var/lib/ceph/mgr/ceph-e24-h17-740xd/keyring size: 144 state: file uid: 167 ok: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false gid: 167 group: ceph mode: '0600' owner: ceph path: /var/lib/ceph/mgr/ceph-e24-h19-740xd/keyring size: 144 state: file uid: 167 ok: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false gid: 167 group: ceph mode: '0600' owner: ceph path: /var/lib/ceph/mgr/ceph-e24-h21-740xd/keyring size: 144 state: file uid: 167 TASK [ceph-mgr : include pre_requisite.yml] ***************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-mgr/tasks/main.yml:11 Wednesday 17 April 2019 11:25:40 +0000 (0:00:00.232) 0:02:06.519 ******* included: /usr/share/ceph-ansible/roles/ceph-mgr/tasks/pre_requisite.yml for e24-h17-740xd.alias.bos.scalelab.redhat.com, e24-h19-740xd.alias.bos.scalelab.redhat.com, e24-h21-740xd.alias.bos.scalelab.redhat.com TASK [ceph-mgr : install ceph-mgr package on RedHat or SUSE] ************************************************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-mgr/tasks/pre_requisite.yml:2 Wednesday 17 April 2019 11:25:40 +0000 (0:00:00.161) 0:02:06.680 ******* changed: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=true attempts: 1 msg: '' rc: 0 results: - 'Installed: ceph-mgr' - 'Installed: ceph-mgr-2:14.2.0-142.g2f9c072.el8cp.x86_64' changed: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=true attempts: 1 msg: '' rc: 0 results: - 'Installed: ceph-mgr' - 'Installed: ceph-mgr-2:14.2.0-142.g2f9c072.el8cp.x86_64' changed: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=true attempts: 1 msg: '' rc: 0 results: - 'Installed: ceph-mgr' - 'Installed: ceph-mgr-2:14.2.0-142.g2f9c072.el8cp.x86_64' TASK [ceph-mgr : install ceph mgr for debian] *************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-mgr/tasks/pre_requisite.yml:11 Wednesday 17 April 2019 11:25:42 +0000 (0:00:02.502) 0:02:09.183 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-mgr : include start_mgr.yml] ********************************************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-mgr/tasks/main.yml:15 Wednesday 17 April 2019 11:25:42 +0000 (0:00:00.091) 0:02:09.274 ******* included: /usr/share/ceph-ansible/roles/ceph-mgr/tasks/start_mgr.yml for e24-h17-740xd.alias.bos.scalelab.redhat.com, 
e24-h19-740xd.alias.bos.scalelab.redhat.com, e24-h21-740xd.alias.bos.scalelab.redhat.com TASK [ceph-mgr : ensure systemd service override directory exists] ****************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-mgr/tasks/start_mgr.yml:2 Wednesday 17 April 2019 11:25:43 +0000 (0:00:00.156) 0:02:09.431 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-mgr : add ceph-mgr systemd service overrides] **************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-mgr/tasks/start_mgr.yml:10 Wednesday 17 April 2019 11:25:43 +0000 (0:00:00.089) 0:02:09.521 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-mgr : generate systemd unit file] **************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-mgr/tasks/start_mgr.yml:20 Wednesday 17 April 2019 11:25:43 +0000 (0:00:00.088) 0:02:09.609 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-mgr : systemd start mgr] ************************************************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-mgr/tasks/start_mgr.yml:33 Wednesday 17 April 2019 11:25:43 +0000 (0:00:00.086) 0:02:09.696 ******* changed: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=true enabled: true name: ceph-mgr@e24-h17-740xd state: started status: ActiveEnterTimestampMonotonic: '0' ActiveExitTimestampMonotonic: '0' ActiveState: inactive After: tmp.mount sysinit.target -.mount systemd-tmpfiles-setup.service time-sync.target basic.target local-fs.target systemd-journald.socket network-online.target system-ceph\x2dmgr.slice AllowIsolate: 'no' AmbientCapabilities: '' AssertResult: 'no' AssertTimestampMonotonic: '0' Before: shutdown.target BlockIOAccounting: 'no' BlockIOWeight: '[not set]' CPUAccounting: 'no' CPUQuotaPerSecUSec: infinity CPUSchedulingPolicy: '0' CPUSchedulingPriority: '0' CPUSchedulingResetOnFork: 'no' CPUShares: '[not set]' CPUUsageNSec: '[not set]' CPUWeight: '[not set]' CacheDirectoryMode: '0755' CanIsolate: 'no' 
CanReload: 'yes' CanStart: 'yes' CanStop: 'yes' CapabilityBoundingSet: cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend CollectMode: inactive ConditionResult: 'no' ConditionTimestampMonotonic: '0' ConfigurationDirectoryMode: '0755' Conflicts: shutdown.target ControlPID: '0' DefaultDependencies: 'yes' Delegate: 'no' Description: Ceph cluster manager daemon DevicePolicy: closed DynamicUser: 'no' Environment: CLUSTER=ceph EnvironmentFiles: /etc/sysconfig/ceph (ignore_errors=yes) ExecMainCode: '0' ExecMainExitTimestampMonotonic: '0' ExecMainPID: '0' ExecMainStartTimestampMonotonic: '0' ExecMainStatus: '0' ExecReload: '{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }' ExecStart: '{ path=/usr/bin/ceph-mgr ; argv[]=/usr/bin/ceph-mgr -f --cluster ${CLUSTER} --id e24-h17-740xd --setuser ceph --setgroup ceph ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }' FailureAction: none FileDescriptorStoreMax: '0' FragmentPath: /usr/lib/systemd/system/ceph-mgr@.service GID: '[not set]' GuessMainPID: 'yes' IOAccounting: 'no' IOSchedulingClass: '0' IOSchedulingPriority: '0' IOWeight: '[not set]' IPAccounting: 'no' IPEgressBytes: '18446744073709551615' IPEgressPackets: '18446744073709551615' IPIngressBytes: '18446744073709551615' IPIngressPackets: '18446744073709551615' Id: ceph-mgr@e24-h17-740xd.service IgnoreOnIsolate: 'no' IgnoreSIGPIPE: 'yes' InactiveEnterTimestampMonotonic: '0' InactiveExitTimestampMonotonic: '0' JobRunningTimeoutUSec: infinity JobTimeoutAction: none JobTimeoutUSec: infinity KeyringMode: private KillMode: control-group KillSignal: '15' LimitAS: infinity LimitASSoft: infinity LimitCORE: infinity LimitCORESoft: infinity LimitCPU: infinity LimitCPUSoft: infinity LimitDATA: infinity LimitDATASoft: infinity LimitFSIZE: infinity LimitFSIZESoft: infinity LimitLOCKS: infinity LimitLOCKSSoft: infinity LimitMEMLOCK: '16777216' LimitMEMLOCKSoft: '16777216' LimitMSGQUEUE: '819200' LimitMSGQUEUESoft: '819200' LimitNICE: '0' LimitNICESoft: '0' LimitNOFILE: '1048576' LimitNOFILESoft: '1048576' LimitNPROC: '1048576' LimitNPROCSoft: '1048576' LimitRSS: infinity LimitRSSSoft: infinity LimitRTPRIO: '0' LimitRTPRIOSoft: '0' LimitRTTIME: infinity LimitRTTIMESoft: infinity LimitSIGPENDING: '767321' LimitSIGPENDINGSoft: '767321' LimitSTACK: infinity LimitSTACKSoft: '8388608' LoadState: loaded LockPersonality: 'yes' LogLevelMax: '-1' LogsDirectoryMode: '0755' MainPID: '0' MemoryAccounting: 'yes' MemoryCurrent: '[not set]' MemoryDenyWriteExecute: 'yes' MemoryHigh: infinity MemoryLimit: infinity MemoryLow: '0' MemoryMax: infinity MemorySwapMax: infinity MountAPIVFS: 'no' MountFlags: '' NFileDescriptorStore: '0' NRestarts: '0' Names: ceph-mgr@e24-h17-740xd.service NeedDaemonReload: 'no' Nice: '0' NoNewPrivileges: 'yes' NonBlocking: 'no' NotifyAccess: none OOMScoreAdjust: '0' OnFailureJobMode: replace PartOf: ceph-mgr.target PermissionsStartOnly: 'no' Perpetual: 'no' PrivateDevices: 'yes' PrivateMounts: 'no' PrivateNetwork: 'no' PrivateTmp: 'yes' 
PrivateUsers: 'no' ProtectControlGroups: 'yes' ProtectHome: 'yes' ProtectKernelModules: 'yes' ProtectKernelTunables: 'yes' ProtectSystem: full RefuseManualStart: 'no' RefuseManualStop: 'no' RemainAfterExit: 'no' RemoveIPC: 'no' Requires: system-ceph\x2dmgr.slice sysinit.target -.mount RequiresMountsFor: /var/tmp Restart: on-failure RestartUSec: 10s RestrictNamespaces: 'no' RestrictRealtime: 'no' Result: success RootDirectoryStartOnly: 'no' RuntimeDirectoryMode: '0755' RuntimeDirectoryPreserve: 'no' RuntimeMaxUSec: infinity SameProcessGroup: 'no' SecureBits: '0' SendSIGHUP: 'no' SendSIGKILL: 'yes' Slice: system-ceph\x2dmgr.slice StandardError: inherit StandardInput: 'null' StandardInputData: '' StandardOutput: journal StartLimitAction: none StartLimitBurst: '3' StartLimitIntervalUSec: 30min StartupBlockIOWeight: '[not set]' StartupCPUShares: '[not set]' StartupCPUWeight: '[not set]' StartupIOWeight: '[not set]' StateChangeTimestampMonotonic: '0' StateDirectoryMode: '0755' StatusErrno: '0' StopWhenUnneeded: 'no' SubState: dead SuccessAction: none SyslogFacility: '3' SyslogLevel: '6' SyslogLevelPrefix: 'yes' SyslogPriority: '30' SystemCallErrorNumber: '0' TTYReset: 'no' TTYVHangup: 'no' TTYVTDisallocate: 'no' TasksAccounting: 'yes' TasksCurrent: '[not set]' TasksMax: '52428' TimeoutStartUSec: 1min 30s TimeoutStopUSec: 1min 30s TimerSlackNSec: '50000' Transient: 'no' Type: simple UID: '[not set]' UMask: '0022' UnitFilePreset: disabled UnitFileState: disabled UtmpMode: init Wants: network-online.target time-sync.target local-fs.target WatchdogTimestampMonotonic: '0' WatchdogUSec: '0' changed: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=true enabled: true name: ceph-mgr@e24-h19-740xd state: started status: ActiveEnterTimestampMonotonic: '0' ActiveExitTimestampMonotonic: '0' ActiveState: inactive After: systemd-tmpfiles-setup.service system-ceph\x2dmgr.slice -.mount sysinit.target basic.target local-fs.target tmp.mount network-online.target time-sync.target systemd-journald.socket AllowIsolate: 'no' AmbientCapabilities: '' AssertResult: 'no' AssertTimestampMonotonic: '0' Before: shutdown.target BlockIOAccounting: 'no' BlockIOWeight: '[not set]' CPUAccounting: 'no' CPUQuotaPerSecUSec: infinity CPUSchedulingPolicy: '0' CPUSchedulingPriority: '0' CPUSchedulingResetOnFork: 'no' CPUShares: '[not set]' CPUUsageNSec: '[not set]' CPUWeight: '[not set]' CacheDirectoryMode: '0755' CanIsolate: 'no' CanReload: 'yes' CanStart: 'yes' CanStop: 'yes' CapabilityBoundingSet: cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend CollectMode: inactive ConditionResult: 'no' ConditionTimestampMonotonic: '0' ConfigurationDirectoryMode: '0755' Conflicts: shutdown.target ControlPID: '0' DefaultDependencies: 'yes' Delegate: 'no' Description: Ceph cluster manager daemon DevicePolicy: closed DynamicUser: 'no' Environment: CLUSTER=ceph EnvironmentFiles: /etc/sysconfig/ceph (ignore_errors=yes) ExecMainCode: '0' ExecMainExitTimestampMonotonic: '0' ExecMainPID: '0' ExecMainStartTimestampMonotonic: '0' ExecMainStatus: '0' ExecReload: '{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; 
ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }' ExecStart: '{ path=/usr/bin/ceph-mgr ; argv[]=/usr/bin/ceph-mgr -f --cluster ${CLUSTER} --id e24-h19-740xd --setuser ceph --setgroup ceph ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }' FailureAction: none FileDescriptorStoreMax: '0' FragmentPath: /usr/lib/systemd/system/ceph-mgr@.service GID: '[not set]' GuessMainPID: 'yes' IOAccounting: 'no' IOSchedulingClass: '0' IOSchedulingPriority: '0' IOWeight: '[not set]' IPAccounting: 'no' IPEgressBytes: '18446744073709551615' IPEgressPackets: '18446744073709551615' IPIngressBytes: '18446744073709551615' IPIngressPackets: '18446744073709551615' Id: ceph-mgr@e24-h19-740xd.service IgnoreOnIsolate: 'no' IgnoreSIGPIPE: 'yes' InactiveEnterTimestampMonotonic: '0' InactiveExitTimestampMonotonic: '0' JobRunningTimeoutUSec: infinity JobTimeoutAction: none JobTimeoutUSec: infinity KeyringMode: private KillMode: control-group KillSignal: '15' LimitAS: infinity LimitASSoft: infinity LimitCORE: infinity LimitCORESoft: infinity LimitCPU: infinity LimitCPUSoft: infinity LimitDATA: infinity LimitDATASoft: infinity LimitFSIZE: infinity LimitFSIZESoft: infinity LimitLOCKS: infinity LimitLOCKSSoft: infinity LimitMEMLOCK: '16777216' LimitMEMLOCKSoft: '16777216' LimitMSGQUEUE: '819200' LimitMSGQUEUESoft: '819200' LimitNICE: '0' LimitNICESoft: '0' LimitNOFILE: '1048576' LimitNOFILESoft: '1048576' LimitNPROC: '1048576' LimitNPROCSoft: '1048576' LimitRSS: infinity LimitRSSSoft: infinity LimitRTPRIO: '0' LimitRTPRIOSoft: '0' LimitRTTIME: infinity LimitRTTIMESoft: infinity LimitSIGPENDING: '767321' LimitSIGPENDINGSoft: '767321' LimitSTACK: infinity LimitSTACKSoft: '8388608' LoadState: loaded LockPersonality: 'yes' LogLevelMax: '-1' LogsDirectoryMode: '0755' MainPID: '0' MemoryAccounting: 'yes' MemoryCurrent: '[not set]' MemoryDenyWriteExecute: 'yes' MemoryHigh: infinity MemoryLimit: infinity MemoryLow: '0' MemoryMax: infinity MemorySwapMax: infinity MountAPIVFS: 'no' MountFlags: '' NFileDescriptorStore: '0' NRestarts: '0' Names: ceph-mgr@e24-h19-740xd.service NeedDaemonReload: 'no' Nice: '0' NoNewPrivileges: 'yes' NonBlocking: 'no' NotifyAccess: none OOMScoreAdjust: '0' OnFailureJobMode: replace PartOf: ceph-mgr.target PermissionsStartOnly: 'no' Perpetual: 'no' PrivateDevices: 'yes' PrivateMounts: 'no' PrivateNetwork: 'no' PrivateTmp: 'yes' PrivateUsers: 'no' ProtectControlGroups: 'yes' ProtectHome: 'yes' ProtectKernelModules: 'yes' ProtectKernelTunables: 'yes' ProtectSystem: full RefuseManualStart: 'no' RefuseManualStop: 'no' RemainAfterExit: 'no' RemoveIPC: 'no' Requires: system-ceph\x2dmgr.slice sysinit.target -.mount RequiresMountsFor: /var/tmp Restart: on-failure RestartUSec: 10s RestrictNamespaces: 'no' RestrictRealtime: 'no' Result: success RootDirectoryStartOnly: 'no' RuntimeDirectoryMode: '0755' RuntimeDirectoryPreserve: 'no' RuntimeMaxUSec: infinity SameProcessGroup: 'no' SecureBits: '0' SendSIGHUP: 'no' SendSIGKILL: 'yes' Slice: system-ceph\x2dmgr.slice StandardError: inherit StandardInput: 'null' StandardInputData: '' StandardOutput: journal StartLimitAction: none StartLimitBurst: '3' StartLimitIntervalUSec: 30min StartupBlockIOWeight: '[not set]' StartupCPUShares: '[not set]' StartupCPUWeight: '[not set]' StartupIOWeight: '[not set]' StateChangeTimestampMonotonic: '0' StateDirectoryMode: '0755' StatusErrno: '0' StopWhenUnneeded: 'no' SubState: dead SuccessAction: none SyslogFacility: '3' SyslogLevel: '6' SyslogLevelPrefix: 
'yes' SyslogPriority: '30' SystemCallErrorNumber: '0' TTYReset: 'no' TTYVHangup: 'no' TTYVTDisallocate: 'no' TasksAccounting: 'yes' TasksCurrent: '[not set]' TasksMax: '52428' TimeoutStartUSec: 1min 30s TimeoutStopUSec: 1min 30s TimerSlackNSec: '50000' Transient: 'no' Type: simple UID: '[not set]' UMask: '0022' UnitFilePreset: disabled UnitFileState: disabled UtmpMode: init Wants: local-fs.target network-online.target time-sync.target WatchdogTimestampMonotonic: '0' WatchdogUSec: '0' changed: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=true enabled: true name: ceph-mgr@e24-h21-740xd state: started status: ActiveEnterTimestampMonotonic: '0' ActiveExitTimestampMonotonic: '0' ActiveState: inactive After: -.mount basic.target systemd-tmpfiles-setup.service local-fs.target system-ceph\x2dmgr.slice systemd-journald.socket network-online.target sysinit.target tmp.mount time-sync.target AllowIsolate: 'no' AmbientCapabilities: '' AssertResult: 'no' AssertTimestampMonotonic: '0' Before: shutdown.target BlockIOAccounting: 'no' BlockIOWeight: '[not set]' CPUAccounting: 'no' CPUQuotaPerSecUSec: infinity CPUSchedulingPolicy: '0' CPUSchedulingPriority: '0' CPUSchedulingResetOnFork: 'no' CPUShares: '[not set]' CPUUsageNSec: '[not set]' CPUWeight: '[not set]' CacheDirectoryMode: '0755' CanIsolate: 'no' CanReload: 'yes' CanStart: 'yes' CanStop: 'yes' CapabilityBoundingSet: cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend CollectMode: inactive ConditionResult: 'no' ConditionTimestampMonotonic: '0' ConfigurationDirectoryMode: '0755' Conflicts: shutdown.target ControlPID: '0' DefaultDependencies: 'yes' Delegate: 'no' Description: Ceph cluster manager daemon DevicePolicy: closed DynamicUser: 'no' Environment: CLUSTER=ceph EnvironmentFiles: /etc/sysconfig/ceph (ignore_errors=yes) ExecMainCode: '0' ExecMainExitTimestampMonotonic: '0' ExecMainPID: '0' ExecMainStartTimestampMonotonic: '0' ExecMainStatus: '0' ExecReload: '{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }' ExecStart: '{ path=/usr/bin/ceph-mgr ; argv[]=/usr/bin/ceph-mgr -f --cluster ${CLUSTER} --id e24-h21-740xd --setuser ceph --setgroup ceph ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }' FailureAction: none FileDescriptorStoreMax: '0' FragmentPath: /usr/lib/systemd/system/ceph-mgr@.service GID: '[not set]' GuessMainPID: 'yes' IOAccounting: 'no' IOSchedulingClass: '0' IOSchedulingPriority: '0' IOWeight: '[not set]' IPAccounting: 'no' IPEgressBytes: '18446744073709551615' IPEgressPackets: '18446744073709551615' IPIngressBytes: '18446744073709551615' IPIngressPackets: '18446744073709551615' Id: ceph-mgr@e24-h21-740xd.service IgnoreOnIsolate: 'no' IgnoreSIGPIPE: 'yes' InactiveEnterTimestampMonotonic: '0' InactiveExitTimestampMonotonic: '0' JobRunningTimeoutUSec: infinity JobTimeoutAction: none JobTimeoutUSec: infinity KeyringMode: private KillMode: control-group KillSignal: '15' LimitAS: infinity LimitASSoft: infinity LimitCORE: infinity LimitCORESoft: infinity LimitCPU: 
infinity LimitCPUSoft: infinity LimitDATA: infinity LimitDATASoft: infinity LimitFSIZE: infinity LimitFSIZESoft: infinity LimitLOCKS: infinity LimitLOCKSSoft: infinity LimitMEMLOCK: '16777216' LimitMEMLOCKSoft: '16777216' LimitMSGQUEUE: '819200' LimitMSGQUEUESoft: '819200' LimitNICE: '0' LimitNICESoft: '0' LimitNOFILE: '1048576' LimitNOFILESoft: '1048576' LimitNPROC: '1048576' LimitNPROCSoft: '1048576' LimitRSS: infinity LimitRSSSoft: infinity LimitRTPRIO: '0' LimitRTPRIOSoft: '0' LimitRTTIME: infinity LimitRTTIMESoft: infinity LimitSIGPENDING: '767321' LimitSIGPENDINGSoft: '767321' LimitSTACK: infinity LimitSTACKSoft: '8388608' LoadState: loaded LockPersonality: 'yes' LogLevelMax: '-1' LogsDirectoryMode: '0755' MainPID: '0' MemoryAccounting: 'yes' MemoryCurrent: '[not set]' MemoryDenyWriteExecute: 'yes' MemoryHigh: infinity MemoryLimit: infinity MemoryLow: '0' MemoryMax: infinity MemorySwapMax: infinity MountAPIVFS: 'no' MountFlags: '' NFileDescriptorStore: '0' NRestarts: '0' Names: ceph-mgr@e24-h21-740xd.service NeedDaemonReload: 'no' Nice: '0' NoNewPrivileges: 'yes' NonBlocking: 'no' NotifyAccess: none OOMScoreAdjust: '0' OnFailureJobMode: replace PartOf: ceph-mgr.target PermissionsStartOnly: 'no' Perpetual: 'no' PrivateDevices: 'yes' PrivateMounts: 'no' PrivateNetwork: 'no' PrivateTmp: 'yes' PrivateUsers: 'no' ProtectControlGroups: 'yes' ProtectHome: 'yes' ProtectKernelModules: 'yes' ProtectKernelTunables: 'yes' ProtectSystem: full RefuseManualStart: 'no' RefuseManualStop: 'no' RemainAfterExit: 'no' RemoveIPC: 'no' Requires: -.mount system-ceph\x2dmgr.slice sysinit.target RequiresMountsFor: /var/tmp Restart: on-failure RestartUSec: 10s RestrictNamespaces: 'no' RestrictRealtime: 'no' Result: success RootDirectoryStartOnly: 'no' RuntimeDirectoryMode: '0755' RuntimeDirectoryPreserve: 'no' RuntimeMaxUSec: infinity SameProcessGroup: 'no' SecureBits: '0' SendSIGHUP: 'no' SendSIGKILL: 'yes' Slice: system-ceph\x2dmgr.slice StandardError: inherit StandardInput: 'null' StandardInputData: '' StandardOutput: journal StartLimitAction: none StartLimitBurst: '3' StartLimitIntervalUSec: 30min StartupBlockIOWeight: '[not set]' StartupCPUShares: '[not set]' StartupCPUWeight: '[not set]' StartupIOWeight: '[not set]' StateChangeTimestampMonotonic: '0' StateDirectoryMode: '0755' StatusErrno: '0' StopWhenUnneeded: 'no' SubState: dead SuccessAction: none SyslogFacility: '3' SyslogLevel: '6' SyslogLevelPrefix: 'yes' SyslogPriority: '30' SystemCallErrorNumber: '0' TTYReset: 'no' TTYVHangup: 'no' TTYVTDisallocate: 'no' TasksAccounting: 'yes' TasksCurrent: '[not set]' TasksMax: '52428' TimeoutStartUSec: 1min 30s TimeoutStopUSec: 1min 30s TimerSlackNSec: '50000' Transient: 'no' Type: simple UID: '[not set]' UMask: '0022' UnitFilePreset: disabled UnitFileState: disabled UtmpMode: init Wants: local-fs.target network-online.target time-sync.target WatchdogTimestampMonotonic: '0' WatchdogUSec: '0' TASK [ceph-mgr : include mgr_modules.yml] ******************************************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-mgr/tasks/main.yml:18 Wednesday 17 April 2019 11:25:43 +0000 (0:00:00.402) 0:02:10.098 ******* skipping: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h19-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False 
skipping: [e24-h21-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False META: ran handlers TASK [set ceph manager install 'Complete'] ****************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/site.yml:151 Wednesday 17 April 2019 11:25:43 +0000 (0:00:00.093) 0:02:10.192 ******* ok: [e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_stats: aggregate: true data: installer_phase_ceph_mgr: end: 20190417112543Z status: Complete per_host: false META: ran handlers PLAY [agents] *********************************************************************************************************************************************************************************************************************************************** skipping: no hosts matched PLAY [osds] ************************************************************************************************************************************************************************************************************************************************* TASK [set ceph osd install 'In Progress'] ******************************************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/site.yml:203 Wednesday 17 April 2019 11:25:43 +0000 (0:00:00.067) 0:02:10.260 ******* ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_stats: aggregate: true data: installer_phase_ceph_osd: start: 20190417112543Z status: In Progress per_host: false META: ran handlers TASK [ceph-facts : include facts.yml] *********************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/main.yml:2 Wednesday 17 April 2019 11:25:43 +0000 (0:00:00.055) 0:02:10.316 ******* statically imported: /usr/share/ceph-ansible/roles/ceph-facts/tasks/set_monitor_address.yml statically imported: /usr/share/ceph-ansible/roles/ceph-facts/tasks/set_radosgw_address.yml included: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml for e23-h05-740xd.alias.bos.scalelab.redhat.com, e24-h05-740xd.alias.bos.scalelab.redhat.com, e24-h07-740xd.alias.bos.scalelab.redhat.com, e23-h07-740xd.alias.bos.scalelab.redhat.com TASK [ceph-facts : check if it is atomic host] ************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:2 Wednesday 17 April 2019 11:25:44 +0000 (0:00:00.219) 0:02:10.535 ******* ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false stat: exists: false ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false stat: exists: false ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false stat: exists: false ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false stat: exists: false TASK [ceph-facts : set_fact is_atomic] 
********************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:7 Wednesday 17 April 2019 11:25:44 +0000 (0:00:00.248) 0:02:10.784 ******* ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: is_atomic: false ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: is_atomic: false ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: is_atomic: false ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: is_atomic: false TASK [ceph-facts : check if podman binary is present] ******************************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:11 Wednesday 17 April 2019 11:25:44 +0000 (0:00:00.177) 0:02:10.961 ******* ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false stat: atime: 1555434958.253481 attr_flags: '' attributes: [] block_size: 4096 blocks: 75536 charset: binary checksum: 82a6634d22fa558103952ca2debde1f4c9a20c1d ctime: 1555434948.5101976 dev: 64768 device_type: 0 executable: true exists: true gid: 0 gr_name: root inode: 125852333 isblk: false ischr: false isdir: false isfifo: false isgid: false islnk: false isreg: true issock: false isuid: false mimetype: application/x-sharedlib mode: '0755' mtime: 1549906438.0 nlink: 1 path: /usr/bin/podman pw_name: root readable: true rgrp: true roth: true rusr: true size: 38671280 uid: 0 version: '3091184885' wgrp: false woth: false writeable: true wusr: true xgrp: true xoth: true xusr: true ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false stat: atime: 1555434958.2956398 attr_flags: '' attributes: [] block_size: 4096 blocks: 75536 charset: binary checksum: 82a6634d22fa558103952ca2debde1f4c9a20c1d ctime: 1555434953.0554633 dev: 64768 device_type: 0 executable: true exists: true gid: 0 gr_name: root inode: 125857974 isblk: false ischr: false isdir: false isfifo: false isgid: false islnk: false isreg: true issock: false isuid: false mimetype: application/x-sharedlib mode: '0755' mtime: 1549906438.0 nlink: 1 path: /usr/bin/podman pw_name: root readable: true rgrp: true roth: true rusr: true size: 38671280 uid: 0 version: '851693758' wgrp: false woth: false writeable: true wusr: true xgrp: true xoth: true xusr: true ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false stat: atime: 1555434958.3109217 attr_flags: '' attributes: [] block_size: 4096 blocks: 75536 charset: binary checksum: 82a6634d22fa558103952ca2debde1f4c9a20c1d ctime: 1555434950.687676 dev: 64768 device_type: 0 executable: true exists: true gid: 0 gr_name: root inode: 125852333 isblk: false ischr: false isdir: false isfifo: false isgid: false islnk: false isreg: true issock: false isuid: false mimetype: application/x-sharedlib mode: '0755' mtime: 1549906438.0 nlink: 1 path: /usr/bin/podman pw_name: root readable: true rgrp: true roth: true rusr: true size: 38671280 uid: 0 version: '1672937529' wgrp: false woth: false writeable: true wusr: true xgrp: true xoth: true xusr: true ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false stat: atime: 1555434958.3495007 attr_flags: '' attributes: [] block_size: 4096 
blocks: 75536 charset: binary checksum: 82a6634d22fa558103952ca2debde1f4c9a20c1d ctime: 1555434950.3542788 dev: 64768 device_type: 0 executable: true exists: true gid: 0 gr_name: root inode: 125852333 isblk: false ischr: false isdir: false isfifo: false isgid: false islnk: false isreg: true issock: false isuid: false mimetype: application/x-sharedlib mode: '0755' mtime: 1549906438.0 nlink: 1 path: /usr/bin/podman pw_name: root readable: true rgrp: true roth: true rusr: true size: 38671280 uid: 0 version: '1197023092' wgrp: false woth: false writeable: true wusr: true xgrp: true xoth: true xusr: true TASK [ceph-facts : set_fact is_podman] ********************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:16 Wednesday 17 April 2019 11:25:44 +0000 (0:00:00.354) 0:02:11.316 ******* ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: is_podman: true ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: is_podman: true ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: is_podman: true ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: is_podman: true TASK [ceph-facts : set_fact container_binary] *************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:20 Wednesday 17 April 2019 11:25:45 +0000 (0:00:00.125) 0:02:11.441 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : set_fact ceph_release ceph_stable_release] *********************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:26 Wednesday 17 April 2019 11:25:45 +0000 (0:00:00.108) 0:02:11.549 ******* ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ceph_release: dummy ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ceph_release: dummy ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ceph_release: dummy ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ceph_release: dummy TASK [ceph-facts : set_fact monitor_name ansible_hostname] ************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:30 Wednesday 17 April 2019 11:25:45 +0000 (0:00:00.178) 0:02:11.728 ******* ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: monitor_name: 
e23-h05-740xd ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: monitor_name: e24-h05-740xd ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: monitor_name: e24-h07-740xd ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: monitor_name: e23-h07-740xd TASK [ceph-facts : set_fact monitor_name ansible_fqdn] ****************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:36 Wednesday 17 April 2019 11:25:45 +0000 (0:00:00.175) 0:02:11.903 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : set_fact docker_exec_cmd] **************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:42 Wednesday 17 April 2019 11:25:45 +0000 (0:00:00.131) 0:02:12.034 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : is ceph running already?] 
**************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:53 Wednesday 17 April 2019 11:25:45 +0000 (0:00:00.200) 0:02:12.235 ******* ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com -> e24-h17-740xd.alias.bos.scalelab.redhat.com] => changed=false cmd: - timeout - --foreground - -s - KILL - 300s - ceph - --cluster - ceph - -s - -f - json delta: '0:00:00.203321' end: '2019-04-17 11:25:46.219473' failed_when_result: false rc: 0 start: '2019-04-17 11:25:46.016152' stderr: '' stderr_lines: [] stdout: |2- {"fsid":"b6d61d41-5c6b-4c17-98be-f37217166318","health":{"checks":{},"status":"HEALTH_OK"},"election_epoch":4,"quorum":[0,1,2],"quorum_names":["e24-h17-740xd","e24-h19-740xd","e24-h21-740xd"],"quorum_age":38,"monmap":{"epoch":1,"fsid":"b6d61d41-5c6b-4c17-98be-f37217166318","modified":"2019-04-17 11:25:04.256284","created":"2019-04-17 11:25:04.256284","min_mon_release":14,"min_mon_release_name":"nautilus","features":{"persistent":["kraken","luminous","mimic","osdmap-prune","nautilus"],"optional":[]},"mons":[{"rank":0,"name":"e24-h17-740xd","public_addrs":{"addrvec":[{"type":"v2","addr":"10.1.24.17:3300","nonce":0},{"type":"v1","addr":"10.1.24.17:6789","nonce":0}]},"addr":"10.1.24.17:6789/0","public_addr":"10.1.24.17:6789/0"},{"rank":1,"name":"e24-h19-740xd","public_addrs":{"addrvec":[{"type":"v2","addr":"10.1.24.19:3300","nonce":0},{"type":"v1","addr":"10.1.24.19:6789","nonce":0}]},"addr":"10.1.24.19:6789/0","public_addr":"10.1.24.19:6789/0"},{"rank":2,"name":"e24-h21-740xd","public_addrs":{"addrvec":[{"type":"v2","addr":"10.1.24.21:3300","nonce":0},{"type":"v1","addr":"10.1.24.21:6789","nonce":0}]},"addr":"10.1.24.21:6789/0","public_addr":"10.1.24.21:6789/0"}]},"osdmap":{"osdmap":{"epoch":1,"num_osds":0,"num_up_osds":0,"num_in_osds":0,"full":false,"nearfull":false,"num_remapped_pgs":0}},"pgmap":{"pgs_by_state":[],"num_pgs":0,"num_pools":0,"num_objects":0,"data_bytes":0,"bytes_used":0,"bytes_avail":0,"bytes_total":0},"fsmap":{"epoch":1,"by_rank":[],"up:standby":0},"mgrmap":{"epoch":2,"active_gid":4284,"active_name":"e24-h17-740xd","active_addrs":{"addrvec":[]},"active_addr":":/0","active_change":"2019-04-17 11:25:45.308797","available":false,"standbys":[],"modules":["iostat","restful"],"available_modules":[{"name":"ansible","can_run":true,"error_string":"","module_options":{"password":{"name":"password","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"server_url":{"name":"server_url","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"username":{"name":"username","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"verify_server":{"name":"verify_server","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"balancer","can_run":true,"error_string":"","module_options":{"active":{"name":"active","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"automatically balance PGs across 
cluster","long_desc":"","tags":[],"see_also":[]},"begin_time":{"name":"begin_time","type":"str","level":"advanced","flags":1,"default_value":"0000","min":"","max":"","enum_allowed":[],"desc":"beginning time of day to automatically balance","long_desc":"This is a time of day in the format HHMM.","tags":[],"see_also":[]},"begin_weekday":{"name":"begin_weekday","type":"uint","level":"advanced","flags":1,"default_value":"0","min":"0","max":"7","enum_allowed":[],"desc":"Restrict automatic balancing to this day of the week or later","long_desc":"0 or 7 = Sunday, 1 = Monday, etc.","tags":[],"see_also":[]},"crush_compat_max_iterations":{"name":"crush_compat_max_iterations","type":"uint","level":"advanced","flags":1,"default_value":"25","min":"1","max":"250","enum_allowed":[],"desc":"maximum number of iterations to attempt optimization","long_desc":"","tags":[],"see_also":[]},"crush_compat_metrics":{"name":"crush_compat_metrics","type":"str","level":"advanced","flags":1,"default_value":"pgs,objects,bytes","min":"","max":"","enum_allowed":[],"desc":"metrics with which to calculate OSD utilization","long_desc":"Value is a list of one or more of \"pgs\", \"objects\", or \"bytes\", and indicates which metrics to use to balance utilization.","tags":[],"see_also":[]},"crush_compat_step":{"name":"crush_compat_step","type":"float","level":"advanced","flags":1,"default_value":"0.5","min":"0.001","max":"0.999","enum_allowed":[],"desc":"aggressiveness of optimization","long_desc":".99 is very aggressive, .01 is less aggressive","tags":[],"see_also":[]},"end_time":{"name":"end_time","type":"str","level":"advanced","flags":1,"default_value":"2400","min":"","max":"","enum_allowed":[],"desc":"ending time of day to automatically balance","long_desc":"This is a time of day in the format HHMM.","tags":[],"see_also":[]},"end_weekday":{"name":"end_weekday","type":"uint","level":"advanced","flags":1,"default_value":"7","min":"0","max":"7","enum_allowed":[],"desc":"Restrict automatic balancing to days of the week earlier than this","long_desc":"0 or 7 = Sunday, 1 = Monday, etc.","tags":[],"see_also":[]},"min_score":{"name":"min_score","type":"float","level":"advanced","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"minimum score, below which no optimization is attempted","long_desc":"","tags":[],"see_also":[]},"mode":{"name":"mode","type":"str","level":"advanced","flags":1,"default_value":"none","min":"","max":"","enum_allowed":["crush-compat","none","upmap"],"desc":"Balancer mode","long_desc":"","tags":[],"see_also":[]},"pool_ids":{"name":"pool_ids","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"pools which the automatic balancing will be limited to","long_desc":"","tags":[],"see_also":[]},"sleep_interval":{"name":"sleep_interval","type":"secs","level":"advanced","flags":1,"default_value":"60","min":"","max":"","enum_allowed":[],"desc":"how frequently to wake up and attempt optimization","long_desc":"","tags":[],"see_also":[]},"upmap_max_deviation":{"name":"upmap_max_deviation","type":"float","level":"advanced","flags":1,"default_value":"0.01","min":"0","max":"1","enum_allowed":[],"desc":"deviation below which no optimization is attempted","long_desc":"If the ratio between the fullest and least-full OSD is below this value then we stop trying to optimize 
placement.","tags":[],"see_also":[]},"upmap_max_iterations":{"name":"upmap_max_iterations","type":"uint","level":"advanced","flags":1,"default_value":"10","min":"","max":"","enum_allowed":[],"desc":"maximum upmap optimization iterations","long_desc":"","tags":[],"see_also":[]}}},{"name":"crash","can_run":true,"error_string":"","module_options":{}},{"name":"deepsea","can_run":true,"error_string":"","module_options":{"salt_api_eauth":{"name":"salt_api_eauth","type":"str","level":"advanced","flags":0,"default_value":"sharedsecret","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"salt_api_password":{"name":"salt_api_password","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"salt_api_url":{"name":"salt_api_url","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"salt_api_username":{"name":"salt_api_username","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"devicehealth","can_run":true,"error_string":"","module_options":{"enable_monitoring":{"name":"enable_monitoring","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"monitor device health metrics","long_desc":"","tags":[],"see_also":[]},"mark_out_threshold":{"name":"mark_out_threshold","type":"secs","level":"advanced","flags":1,"default_value":"2419200","min":"","max":"","enum_allowed":[],"desc":"automatically mark OSD if it may fail before this long","long_desc":"","tags":[],"see_also":[]},"pool_name":{"name":"pool_name","type":"str","level":"advanced","flags":1,"default_value":"device_health_metrics","min":"","max":"","enum_allowed":[],"desc":"name of pool in which to store device health metrics","long_desc":"","tags":[],"see_also":[]},"retention_period":{"name":"retention_period","type":"secs","level":"advanced","flags":1,"default_value":"15552000","min":"","max":"","enum_allowed":[],"desc":"how long to retain device health metrics","long_desc":"","tags":[],"see_also":[]},"scrape_frequency":{"name":"scrape_frequency","type":"secs","level":"advanced","flags":1,"default_value":"86400","min":"","max":"","enum_allowed":[],"desc":"how frequently to scrape device health metrics","long_desc":"","tags":[],"see_also":[]},"self_heal":{"name":"self_heal","type":"bool","level":"advanced","flags":1,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"preemptively heal cluster around devices that may fail","long_desc":"","tags":[],"see_also":[]},"sleep_interval":{"name":"sleep_interval","type":"secs","level":"advanced","flags":1,"default_value":"600","min":"","max":"","enum_allowed":[],"desc":"how frequently to wake up and check device health","long_desc":"","tags":[],"see_also":[]},"warn_threshold":{"name":"warn_threshold","type":"secs","level":"advanced","flags":1,"default_value":"7257600","min":"","max":"","enum_allowed":[],"desc":"raise health warning if OSD may fail before this long","long_desc":"","tags":[],"see_also":[]}}},{"name":"influx","can_run":false,"error_string":"influxdb python module not 
found","module_options":{"batch_size":{"name":"batch_size","type":"str","level":"advanced","flags":0,"default_value":"5000","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"database":{"name":"database","type":"str","level":"advanced","flags":0,"default_value":"ceph","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"hostname":{"name":"hostname","type":"str","level":"advanced","flags":0,"default_value":"None","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"interval":{"name":"interval","type":"str","level":"advanced","flags":0,"default_value":"30","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"password":{"name":"password","type":"str","level":"advanced","flags":0,"default_value":"None","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"port":{"name":"port","type":"str","level":"advanced","flags":0,"default_value":"8086","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ssl":{"name":"ssl","type":"str","level":"advanced","flags":0,"default_value":"false","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"threads":{"name":"threads","type":"str","level":"advanced","flags":0,"default_value":"5","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"username":{"name":"username","type":"str","level":"advanced","flags":0,"default_value":"None","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"verify_ssl":{"name":"verify_ssl","type":"str","level":"advanced","flags":0,"default_value":"true","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"insights","can_run":true,"error_string":"","module_options":{}},{"name":"iostat","can_run":true,"error_string":"","module_options":{}},{"name":"localpool","can_run":true,"error_string":"","module_options":{"failure_domain":{"name":"failure_domain","type":"str","level":"advanced","flags":1,"default_value":"host","min":"","max":"","enum_allowed":[],"desc":"failure domain for any created local pool","long_desc":"","tags":[],"see_also":[]},"min_size":{"name":"min_size","type":"int","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"default min_size for any created local pool","long_desc":"","tags":[],"see_also":[]},"num_rep":{"name":"num_rep","type":"int","level":"advanced","flags":1,"default_value":"3","min":"","max":"","enum_allowed":[],"desc":"default replica count for any created local pool","long_desc":"","tags":[],"see_also":[]},"pg_num":{"name":"pg_num","type":"int","level":"advanced","flags":1,"default_value":"128","min":"","max":"","enum_allowed":[],"desc":"default pg_num for any created local pool","long_desc":"","tags":[],"see_also":[]},"prefix":{"name":"prefix","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"name prefix for any created local pool","long_desc":"","tags":[],"see_also":[]},"subtree":{"name":"subtree","type":"str","level":"advanced","flags":1,"default_value":"rack","min":"","max":"","enum_allowed":[],"desc":"CRUSH level for which to create a local 
pool","long_desc":"","tags":[],"see_also":[]}}},{"name":"orchestrator_cli","can_run":true,"error_string":"","module_options":{"orchestrator":{"name":"orchestrator","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"pg_autoscaler","can_run":true,"error_string":"","module_options":{"sleep_interval":{"name":"sleep_interval","type":"str","level":"advanced","flags":0,"default_value":"60","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"progress","can_run":true,"error_string":"","module_options":{"max_completed_events":{"name":"max_completed_events","type":"int","level":"advanced","flags":1,"default_value":"50","min":"","max":"","enum_allowed":[],"desc":"number of past completed events to remember","long_desc":"","tags":[],"see_also":[]},"persist_interval":{"name":"persist_interval","type":"secs","level":"advanced","flags":1,"default_value":"5","min":"","max":"","enum_allowed":[],"desc":"how frequently to persist completed events","long_desc":"","tags":[],"see_also":[]}}},{"name":"prometheus","can_run":true,"error_string":"","module_options":{"rbd_stats_pools":{"name":"rbd_stats_pools","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rbd_stats_pools_refresh_interval":{"name":"rbd_stats_pools_refresh_interval","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"scrape_interval":{"name":"scrape_interval","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"server_addr":{"name":"server_addr","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"server_port":{"name":"server_port","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"rbd_support","can_run":true,"error_string":"","module_options":{}},{"name":"restful","can_run":true,"error_string":"","module_options":{"key_file":{"name":"key_file","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"server_addr":{"name":"server_addr","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"server_port":{"name":"server_port","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"selftest","can_run":true,"error_string":"","module_options":{"roption1":{"name":"roption1","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"roption2":{"name":"roption2","type":"str","level":"advanced","flags":0,"default_value":"xyz","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rwoption1":{"name":"rwoption1","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rwoption2":{"name":"rwoption2","type":"int","level":"advanced","flags":0,"d
efault_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rwoption3":{"name":"rwoption3","type":"float","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rwoption4":{"name":"rwoption4","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rwoption5":{"name":"rwoption5","type":"bool","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rwoption6":{"name":"rwoption6","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"testkey":{"name":"testkey","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"testlkey":{"name":"testlkey","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"testnewline":{"name":"testnewline","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"status","can_run":true,"error_string":"","module_options":{}},{"name":"telegraf","can_run":true,"error_string":"","module_options":{"address":{"name":"address","type":"str","level":"advanced","flags":0,"default_value":"unixgram:///tmp/telegraf.sock","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"interval":{"name":"interval","type":"secs","level":"advanced","flags":0,"default_value":"15","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"telemetry","can_run":true,"error_string":"","module_options":{"contact":{"name":"contact","type":"str","level":"advanced","flags":0,"default_value":"None","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"description":{"name":"description","type":"str","level":"advanced","flags":0,"default_value":"None","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"enabled":{"name":"enabled","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"interval":{"name":"interval","type":"int","level":"advanced","flags":0,"default_value":"72","min":"24","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"leaderboard":{"name":"leaderboard","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"organization":{"name":"organization","type":"str","level":"advanced","flags":0,"default_value":"None","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"proxy":{"name":"proxy","type":"str","level":"advanced","flags":0,"default_value":"None","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"url":{"name":"url","type":"str","level":"advanced","flags":0,"default_value":"https://telemetry.ceph.com/report","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"test_orchestrator","can_run":true,"error_string":"","module_options":{}},{"name":"volumes","can_run":
true,"error_string":"","module_options":{}},{"name":"zabbix","can_run":true,"error_string":"","module_options":{"identifier":{"name":"identifier","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"interval":{"name":"interval","type":"secs","level":"advanced","flags":0,"default_value":"60","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"zabbix_host":{"name":"zabbix_host","type":"str","level":"advanced","flags":0,"default_value":"None","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"zabbix_port":{"name":"zabbix_port","type":"int","level":"advanced","flags":0,"default_value":"10051","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"zabbix_sender":{"name":"zabbix_sender","type":"str","level":"advanced","flags":0,"default_value":"/usr/bin/zabbix_sender","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}}],"services":{},"always_on_modules":{"nautilus":["balancer","crash","devicehealth","orchestrator_cli","progress","status","volumes"]}},"servicemap":{"epoch":1,"modified":"0.000000","services":{}},"progress_events":{}} stdout_lines: TASK [ceph-facts : set_fact ceph_current_status rc 1] ******************************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:67 Wednesday 17 April 2019 11:25:46 +0000 (0:00:00.457) 0:02:12.692 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : create a local fetch directory if it does not exist] ************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:74 Wednesday 17 April 2019 11:25:46 +0000 (0:00:00.113) 0:02:12.805 ******* ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com -> localhost] => changed=false gid: 0 group: root mode: '0777' owner: root path: /ceph-ansible-keys size: 138 state: directory uid: 0 ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com -> localhost] => changed=false gid: 0 group: root mode: '0777' owner: root path: /ceph-ansible-keys size: 138 state: directory uid: 0 ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com -> localhost] => changed=false gid: 0 group: root mode: '0777' owner: root path: /ceph-ansible-keys size: 138 state: directory uid: 0 ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com -> localhost] => changed=false gid: 0 group: root mode: '0777' owner: root path: /ceph-ansible-keys size: 138 state: directory uid: 0 TASK [ceph-facts : set_fact ceph_current_status (convert to json)] ****************************************************************************************************************************************************************************************** task path: 
/usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:84 Wednesday 17 April 2019 11:25:46 +0000 (0:00:00.289) 0:02:13.095 ******* ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ceph_current_status: election_epoch: 4 fsid: b6d61d41-5c6b-4c17-98be-f37217166318 fsmap: by_rank: [] epoch: 1 up:standby: 0 health: checks: {} status: HEALTH_OK mgrmap: active_addr: :/0 active_addrs: addrvec: [] active_change: '2019-04-17 11:25:45.308797' active_gid: 4284 active_name: e24-h17-740xd always_on_modules: nautilus: - balancer - crash - devicehealth - orchestrator_cli - progress - status - volumes available: false available_modules: - can_run: true error_string: '' module_options: password: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: password see_also: [] tags: [] type: str server_url: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: server_url see_also: [] tags: [] type: str username: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: username see_also: [] tags: [] type: str verify_server: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: verify_server see_also: [] tags: [] type: str name: ansible - can_run: true error_string: '' module_options: active: default_value: 'False' desc: automatically balance PGs across cluster enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: active see_also: [] tags: [] type: bool begin_time: default_value: '0000' desc: beginning time of day to automatically balance enum_allowed: [] flags: 1 level: advanced long_desc: This is a time of day in the format HHMM. max: '' min: '' name: begin_time see_also: [] tags: [] type: str begin_weekday: default_value: '0' desc: Restrict automatic balancing to this day of the week or later enum_allowed: [] flags: 1 level: advanced long_desc: 0 or 7 = Sunday, 1 = Monday, etc. max: '7' min: '0' name: begin_weekday see_also: [] tags: [] type: uint crush_compat_max_iterations: default_value: '25' desc: maximum number of iterations to attempt optimization enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '250' min: '1' name: crush_compat_max_iterations see_also: [] tags: [] type: uint crush_compat_metrics: default_value: pgs,objects,bytes desc: metrics with which to calculate OSD utilization enum_allowed: [] flags: 1 level: advanced long_desc: Value is a list of one or more of "pgs", "objects", or "bytes", and indicates which metrics to use to balance utilization. max: '' min: '' name: crush_compat_metrics see_also: [] tags: [] type: str crush_compat_step: default_value: '0.5' desc: aggressiveness of optimization enum_allowed: [] flags: 1 level: advanced long_desc: .99 is very aggressive, .01 is less aggressive max: '0.999' min: '0.001' name: crush_compat_step see_also: [] tags: [] type: float end_time: default_value: '2400' desc: ending time of day to automatically balance enum_allowed: [] flags: 1 level: advanced long_desc: This is a time of day in the format HHMM. max: '' min: '' name: end_time see_also: [] tags: [] type: str end_weekday: default_value: '7' desc: Restrict automatic balancing to days of the week earlier than this enum_allowed: [] flags: 1 level: advanced long_desc: 0 or 7 = Sunday, 1 = Monday, etc. 
max: '7' min: '0' name: end_weekday see_also: [] tags: [] type: uint min_score: default_value: '0' desc: minimum score, below which no optimization is attempted enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: min_score see_also: [] tags: [] type: float mode: default_value: none desc: Balancer mode enum_allowed: - crush-compat - none - upmap flags: 1 level: advanced long_desc: '' max: '' min: '' name: mode see_also: [] tags: [] type: str pool_ids: default_value: '' desc: pools which the automatic balancing will be limited to enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: pool_ids see_also: [] tags: [] type: str sleep_interval: default_value: '60' desc: how frequently to wake up and attempt optimization enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: sleep_interval see_also: [] tags: [] type: secs upmap_max_deviation: default_value: '0.01' desc: deviation below which no optimization is attempted enum_allowed: [] flags: 1 level: advanced long_desc: If the ratio between the fullest and least-full OSD is below this value then we stop trying to optimize placement. max: '1' min: '0' name: upmap_max_deviation see_also: [] tags: [] type: float upmap_max_iterations: default_value: '10' desc: maximum upmap optimization iterations enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: upmap_max_iterations see_also: [] tags: [] type: uint name: balancer - can_run: true error_string: '' module_options: {} name: crash - can_run: true error_string: '' module_options: salt_api_eauth: default_value: sharedsecret desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: salt_api_eauth see_also: [] tags: [] type: str salt_api_password: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: salt_api_password see_also: [] tags: [] type: str salt_api_url: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: salt_api_url see_also: [] tags: [] type: str salt_api_username: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: salt_api_username see_also: [] tags: [] type: str name: deepsea - can_run: true error_string: '' module_options: enable_monitoring: default_value: 'False' desc: monitor device health metrics enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: enable_monitoring see_also: [] tags: [] type: bool mark_out_threshold: default_value: '2419200' desc: automatically mark OSD if it may fail before this long enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: mark_out_threshold see_also: [] tags: [] type: secs pool_name: default_value: device_health_metrics desc: name of pool in which to store device health metrics enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: pool_name see_also: [] tags: [] type: str retention_period: default_value: '15552000' desc: how long to retain device health metrics enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: retention_period see_also: [] tags: [] type: secs scrape_frequency: default_value: '86400' desc: how frequently to scrape device health metrics enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: scrape_frequency see_also: [] tags: [] type: secs self_heal: default_value: 'True' desc: preemptively heal cluster around devices that may fail 
enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: self_heal see_also: [] tags: [] type: bool sleep_interval: default_value: '600' desc: how frequently to wake up and check device health enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: sleep_interval see_also: [] tags: [] type: secs warn_threshold: default_value: '7257600' desc: raise health warning if OSD may fail before this long enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: warn_threshold see_also: [] tags: [] type: secs name: devicehealth - can_run: false error_string: influxdb python module not found module_options: batch_size: default_value: '5000' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: batch_size see_also: [] tags: [] type: str database: default_value: ceph desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: database see_also: [] tags: [] type: str hostname: default_value: None desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: hostname see_also: [] tags: [] type: str interval: default_value: '30' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: interval see_also: [] tags: [] type: str password: default_value: None desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: password see_also: [] tags: [] type: str port: default_value: '8086' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: port see_also: [] tags: [] type: str ssl: default_value: 'false' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: ssl see_also: [] tags: [] type: str threads: default_value: '5' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: threads see_also: [] tags: [] type: str username: default_value: None desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: username see_also: [] tags: [] type: str verify_ssl: default_value: 'true' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: verify_ssl see_also: [] tags: [] type: str name: influx - can_run: true error_string: '' module_options: {} name: insights - can_run: true error_string: '' module_options: {} name: iostat - can_run: true error_string: '' module_options: failure_domain: default_value: host desc: failure domain for any created local pool enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: failure_domain see_also: [] tags: [] type: str min_size: default_value: '' desc: default min_size for any created local pool enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: min_size see_also: [] tags: [] type: int num_rep: default_value: '3' desc: default replica count for any created local pool enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: num_rep see_also: [] tags: [] type: int pg_num: default_value: '128' desc: default pg_num for any created local pool enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: pg_num see_also: [] tags: [] type: int prefix: default_value: '' desc: name prefix for any created local pool enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: prefix see_also: [] tags: [] type: str subtree: default_value: rack desc: CRUSH level for which to create a local pool enum_allowed: [] flags: 1 level: advanced 
long_desc: '' max: '' min: '' name: subtree see_also: [] tags: [] type: str name: localpool - can_run: true error_string: '' module_options: orchestrator: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: orchestrator see_also: [] tags: [] type: str name: orchestrator_cli - can_run: true error_string: '' module_options: sleep_interval: default_value: '60' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: sleep_interval see_also: [] tags: [] type: str name: pg_autoscaler - can_run: true error_string: '' module_options: max_completed_events: default_value: '50' desc: number of past completed events to remember enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: max_completed_events see_also: [] tags: [] type: int persist_interval: default_value: '5' desc: how frequently to persist completed events enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: persist_interval see_also: [] tags: [] type: secs name: progress - can_run: true error_string: '' module_options: rbd_stats_pools: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: rbd_stats_pools see_also: [] tags: [] type: str rbd_stats_pools_refresh_interval: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: rbd_stats_pools_refresh_interval see_also: [] tags: [] type: str scrape_interval: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: scrape_interval see_also: [] tags: [] type: str server_addr: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: server_addr see_also: [] tags: [] type: str server_port: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: server_port see_also: [] tags: [] type: str name: prometheus - can_run: true error_string: '' module_options: {} name: rbd_support - can_run: true error_string: '' module_options: key_file: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: key_file see_also: [] tags: [] type: str server_addr: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: server_addr see_also: [] tags: [] type: str server_port: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: server_port see_also: [] tags: [] type: str name: restful - can_run: true error_string: '' module_options: roption1: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: roption1 see_also: [] tags: [] type: str roption2: default_value: xyz desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: roption2 see_also: [] tags: [] type: str rwoption1: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: rwoption1 see_also: [] tags: [] type: str rwoption2: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: rwoption2 see_also: [] tags: [] type: int rwoption3: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: rwoption3 see_also: [] tags: [] type: float rwoption4: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' 
min: '' name: rwoption4 see_also: [] tags: [] type: str rwoption5: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: rwoption5 see_also: [] tags: [] type: bool rwoption6: default_value: 'True' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: rwoption6 see_also: [] tags: [] type: bool testkey: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: testkey see_also: [] tags: [] type: str testlkey: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: testlkey see_also: [] tags: [] type: str testnewline: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: testnewline see_also: [] tags: [] type: str name: selftest - can_run: true error_string: '' module_options: {} name: status - can_run: true error_string: '' module_options: address: default_value: unixgram:///tmp/telegraf.sock desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: address see_also: [] tags: [] type: str interval: default_value: '15' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: interval see_also: [] tags: [] type: secs name: telegraf - can_run: true error_string: '' module_options: contact: default_value: None desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: contact see_also: [] tags: [] type: str description: default_value: None desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: description see_also: [] tags: [] type: str enabled: default_value: 'False' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: enabled see_also: [] tags: [] type: bool interval: default_value: '72' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '24' name: interval see_also: [] tags: [] type: int leaderboard: default_value: 'False' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: leaderboard see_also: [] tags: [] type: bool organization: default_value: None desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: organization see_also: [] tags: [] type: str proxy: default_value: None desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: proxy see_also: [] tags: [] type: str url: default_value: https://telemetry.ceph.com/report desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: url see_also: [] tags: [] type: str name: telemetry - can_run: true error_string: '' module_options: {} name: test_orchestrator - can_run: true error_string: '' module_options: {} name: volumes - can_run: true error_string: '' module_options: identifier: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: identifier see_also: [] tags: [] type: str interval: default_value: '60' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: interval see_also: [] tags: [] type: secs zabbix_host: default_value: None desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: zabbix_host see_also: [] tags: [] type: str zabbix_port: default_value: '10051' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: zabbix_port see_also: [] tags: [] type: int 
zabbix_sender: default_value: /usr/bin/zabbix_sender desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: zabbix_sender see_also: [] tags: [] type: str name: zabbix epoch: 2 modules: - iostat - restful services: {} standbys: [] monmap: created: '2019-04-17 11:25:04.256284' epoch: 1 features: optional: [] persistent: - kraken - luminous - mimic - osdmap-prune - nautilus fsid: b6d61d41-5c6b-4c17-98be-f37217166318 min_mon_release: 14 min_mon_release_name: nautilus modified: '2019-04-17 11:25:04.256284' mons: - addr: 10.1.24.17:6789/0 name: e24-h17-740xd public_addr: 10.1.24.17:6789/0 public_addrs: addrvec: - addr: 10.1.24.17:3300 nonce: 0 type: v2 - addr: 10.1.24.17:6789 nonce: 0 type: v1 rank: 0 - addr: 10.1.24.19:6789/0 name: e24-h19-740xd public_addr: 10.1.24.19:6789/0 public_addrs: addrvec: - addr: 10.1.24.19:3300 nonce: 0 type: v2 - addr: 10.1.24.19:6789 nonce: 0 type: v1 rank: 1 - addr: 10.1.24.21:6789/0 name: e24-h21-740xd public_addr: 10.1.24.21:6789/0 public_addrs: addrvec: - addr: 10.1.24.21:3300 nonce: 0 type: v2 - addr: 10.1.24.21:6789 nonce: 0 type: v1 rank: 2 osdmap: osdmap: epoch: 1 full: false nearfull: false num_in_osds: 0 num_osds: 0 num_remapped_pgs: 0 num_up_osds: 0 pgmap: bytes_avail: 0 bytes_total: 0 bytes_used: 0 data_bytes: 0 num_objects: 0 num_pgs: 0 num_pools: 0 pgs_by_state: [] progress_events: {} quorum: - 0 - 1 - 2 quorum_age: 38 quorum_names: - e24-h17-740xd - e24-h19-740xd - e24-h21-740xd servicemap: epoch: 1 modified: '0.000000' services: {} ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ceph_current_status: election_epoch: 4 fsid: b6d61d41-5c6b-4c17-98be-f37217166318 fsmap: by_rank: [] epoch: 1 up:standby: 0 health: checks: {} status: HEALTH_OK mgrmap: active_addr: :/0 active_addrs: addrvec: [] active_change: '2019-04-17 11:25:45.308797' active_gid: 4284 active_name: e24-h17-740xd always_on_modules: nautilus: - balancer - crash - devicehealth - orchestrator_cli - progress - status - volumes available: false available_modules: - can_run: true error_string: '' module_options: password: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: password see_also: [] tags: [] type: str server_url: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: server_url see_also: [] tags: [] type: str username: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: username see_also: [] tags: [] type: str verify_server: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: verify_server see_also: [] tags: [] type: str name: ansible - can_run: true error_string: '' module_options: active: default_value: 'False' desc: automatically balance PGs across cluster enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: active see_also: [] tags: [] type: bool begin_time: default_value: '0000' desc: beginning time of day to automatically balance enum_allowed: [] flags: 1 level: advanced long_desc: This is a time of day in the format HHMM. max: '' min: '' name: begin_time see_also: [] tags: [] type: str begin_weekday: default_value: '0' desc: Restrict automatic balancing to this day of the week or later enum_allowed: [] flags: 1 level: advanced long_desc: 0 or 7 = Sunday, 1 = Monday, etc. 
max: '7' min: '0' name: begin_weekday see_also: [] tags: [] type: uint crush_compat_max_iterations: default_value: '25' desc: maximum number of iterations to attempt optimization enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '250' min: '1' name: crush_compat_max_iterations see_also: [] tags: [] type: uint crush_compat_metrics: default_value: pgs,objects,bytes desc: metrics with which to calculate OSD utilization enum_allowed: [] flags: 1 level: advanced long_desc: Value is a list of one or more of "pgs", "objects", or "bytes", and indicates which metrics to use to balance utilization. max: '' min: '' name: crush_compat_metrics see_also: [] tags: [] type: str crush_compat_step: default_value: '0.5' desc: aggressiveness of optimization enum_allowed: [] flags: 1 level: advanced long_desc: .99 is very aggressive, .01 is less aggressive max: '0.999' min: '0.001' name: crush_compat_step see_also: [] tags: [] type: float end_time: default_value: '2400' desc: ending time of day to automatically balance enum_allowed: [] flags: 1 level: advanced long_desc: This is a time of day in the format HHMM. max: '' min: '' name: end_time see_also: [] tags: [] type: str end_weekday: default_value: '7' desc: Restrict automatic balancing to days of the week earlier than this enum_allowed: [] flags: 1 level: advanced long_desc: 0 or 7 = Sunday, 1 = Monday, etc. max: '7' min: '0' name: end_weekday see_also: [] tags: [] type: uint min_score: default_value: '0' desc: minimum score, below which no optimization is attempted enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: min_score see_also: [] tags: [] type: float mode: default_value: none desc: Balancer mode enum_allowed: - crush-compat - none - upmap flags: 1 level: advanced long_desc: '' max: '' min: '' name: mode see_also: [] tags: [] type: str pool_ids: default_value: '' desc: pools which the automatic balancing will be limited to enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: pool_ids see_also: [] tags: [] type: str sleep_interval: default_value: '60' desc: how frequently to wake up and attempt optimization enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: sleep_interval see_also: [] tags: [] type: secs upmap_max_deviation: default_value: '0.01' desc: deviation below which no optimization is attempted enum_allowed: [] flags: 1 level: advanced long_desc: If the ratio between the fullest and least-full OSD is below this value then we stop trying to optimize placement. 
max: '1' min: '0' name: upmap_max_deviation see_also: [] tags: [] type: float upmap_max_iterations: default_value: '10' desc: maximum upmap optimization iterations enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: upmap_max_iterations see_also: [] tags: [] type: uint name: balancer - can_run: true error_string: '' module_options: {} name: crash - can_run: true error_string: '' module_options: salt_api_eauth: default_value: sharedsecret desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: salt_api_eauth see_also: [] tags: [] type: str salt_api_password: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: salt_api_password see_also: [] tags: [] type: str salt_api_url: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: salt_api_url see_also: [] tags: [] type: str salt_api_username: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: salt_api_username see_also: [] tags: [] type: str name: deepsea - can_run: true error_string: '' module_options: enable_monitoring: default_value: 'False' desc: monitor device health metrics enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: enable_monitoring see_also: [] tags: [] type: bool mark_out_threshold: default_value: '2419200' desc: automatically mark OSD if it may fail before this long enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: mark_out_threshold see_also: [] tags: [] type: secs pool_name: default_value: device_health_metrics desc: name of pool in which to store device health metrics enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: pool_name see_also: [] tags: [] type: str retention_period: default_value: '15552000' desc: how long to retain device health metrics enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: retention_period see_also: [] tags: [] type: secs scrape_frequency: default_value: '86400' desc: how frequently to scrape device health metrics enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: scrape_frequency see_also: [] tags: [] type: secs self_heal: default_value: 'True' desc: preemptively heal cluster around devices that may fail enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: self_heal see_also: [] tags: [] type: bool sleep_interval: default_value: '600' desc: how frequently to wake up and check device health enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: sleep_interval see_also: [] tags: [] type: secs warn_threshold: default_value: '7257600' desc: raise health warning if OSD may fail before this long enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: warn_threshold see_also: [] tags: [] type: secs name: devicehealth - can_run: false error_string: influxdb python module not found module_options: batch_size: default_value: '5000' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: batch_size see_also: [] tags: [] type: str database: default_value: ceph desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: database see_also: [] tags: [] type: str hostname: default_value: None desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: hostname see_also: [] tags: [] type: str interval: default_value: 
'30' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: interval see_also: [] tags: [] type: str password: default_value: None desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: password see_also: [] tags: [] type: str port: default_value: '8086' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: port see_also: [] tags: [] type: str ssl: default_value: 'false' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: ssl see_also: [] tags: [] type: str threads: default_value: '5' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: threads see_also: [] tags: [] type: str username: default_value: None desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: username see_also: [] tags: [] type: str verify_ssl: default_value: 'true' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: verify_ssl see_also: [] tags: [] type: str name: influx - can_run: true error_string: '' module_options: {} name: insights - can_run: true error_string: '' module_options: {} name: iostat - can_run: true error_string: '' module_options: failure_domain: default_value: host desc: failure domain for any created local pool enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: failure_domain see_also: [] tags: [] type: str min_size: default_value: '' desc: default min_size for any created local pool enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: min_size see_also: [] tags: [] type: int num_rep: default_value: '3' desc: default replica count for any created local pool enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: num_rep see_also: [] tags: [] type: int pg_num: default_value: '128' desc: default pg_num for any created local pool enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: pg_num see_also: [] tags: [] type: int prefix: default_value: '' desc: name prefix for any created local pool enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: prefix see_also: [] tags: [] type: str subtree: default_value: rack desc: CRUSH level for which to create a local pool enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: subtree see_also: [] tags: [] type: str name: localpool - can_run: true error_string: '' module_options: orchestrator: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: orchestrator see_also: [] tags: [] type: str name: orchestrator_cli - can_run: true error_string: '' module_options: sleep_interval: default_value: '60' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: sleep_interval see_also: [] tags: [] type: str name: pg_autoscaler - can_run: true error_string: '' module_options: max_completed_events: default_value: '50' desc: number of past completed events to remember enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: max_completed_events see_also: [] tags: [] type: int persist_interval: default_value: '5' desc: how frequently to persist completed events enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: persist_interval see_also: [] tags: [] type: secs name: progress - can_run: true error_string: '' module_options: rbd_stats_pools: default_value: '' desc: '' enum_allowed: [] 
flags: 0 level: advanced long_desc: '' max: '' min: '' name: rbd_stats_pools see_also: [] tags: [] type: str rbd_stats_pools_refresh_interval: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: rbd_stats_pools_refresh_interval see_also: [] tags: [] type: str scrape_interval: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: scrape_interval see_also: [] tags: [] type: str server_addr: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: server_addr see_also: [] tags: [] type: str server_port: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: server_port see_also: [] tags: [] type: str name: prometheus - can_run: true error_string: '' module_options: {} name: rbd_support - can_run: true error_string: '' module_options: key_file: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: key_file see_also: [] tags: [] type: str server_addr: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: server_addr see_also: [] tags: [] type: str server_port: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: server_port see_also: [] tags: [] type: str name: restful - can_run: true error_string: '' module_options: roption1: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: roption1 see_also: [] tags: [] type: str roption2: default_value: xyz desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: roption2 see_also: [] tags: [] type: str rwoption1: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: rwoption1 see_also: [] tags: [] type: str rwoption2: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: rwoption2 see_also: [] tags: [] type: int rwoption3: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: rwoption3 see_also: [] tags: [] type: float rwoption4: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: rwoption4 see_also: [] tags: [] type: str rwoption5: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: rwoption5 see_also: [] tags: [] type: bool rwoption6: default_value: 'True' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: rwoption6 see_also: [] tags: [] type: bool testkey: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: testkey see_also: [] tags: [] type: str testlkey: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: testlkey see_also: [] tags: [] type: str testnewline: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: testnewline see_also: [] tags: [] type: str name: selftest - can_run: true error_string: '' module_options: {} name: status - can_run: true error_string: '' module_options: address: default_value: unixgram:///tmp/telegraf.sock desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: address see_also: [] tags: [] type: str interval: 
default_value: '15' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: interval see_also: [] tags: [] type: secs name: telegraf - can_run: true error_string: '' module_options: contact: default_value: None desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: contact see_also: [] tags: [] type: str description: default_value: None desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: description see_also: [] tags: [] type: str enabled: default_value: 'False' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: enabled see_also: [] tags: [] type: bool interval: default_value: '72' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '24' name: interval see_also: [] tags: [] type: int leaderboard: default_value: 'False' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: leaderboard see_also: [] tags: [] type: bool organization: default_value: None desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: organization see_also: [] tags: [] type: str proxy: default_value: None desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: proxy see_also: [] tags: [] type: str url: default_value: https://telemetry.ceph.com/report desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: url see_also: [] tags: [] type: str name: telemetry - can_run: true error_string: '' module_options: {} name: test_orchestrator - can_run: true error_string: '' module_options: {} name: volumes - can_run: true error_string: '' module_options: identifier: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: identifier see_also: [] tags: [] type: str interval: default_value: '60' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: interval see_also: [] tags: [] type: secs zabbix_host: default_value: None desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: zabbix_host see_also: [] tags: [] type: str zabbix_port: default_value: '10051' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: zabbix_port see_also: [] tags: [] type: int zabbix_sender: default_value: /usr/bin/zabbix_sender desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: zabbix_sender see_also: [] tags: [] type: str name: zabbix epoch: 2 modules: - iostat - restful services: {} standbys: [] monmap: created: '2019-04-17 11:25:04.256284' epoch: 1 features: optional: [] persistent: - kraken - luminous - mimic - osdmap-prune - nautilus fsid: b6d61d41-5c6b-4c17-98be-f37217166318 min_mon_release: 14 min_mon_release_name: nautilus modified: '2019-04-17 11:25:04.256284' mons: - addr: 10.1.24.17:6789/0 name: e24-h17-740xd public_addr: 10.1.24.17:6789/0 public_addrs: addrvec: - addr: 10.1.24.17:3300 nonce: 0 type: v2 - addr: 10.1.24.17:6789 nonce: 0 type: v1 rank: 0 - addr: 10.1.24.19:6789/0 name: e24-h19-740xd public_addr: 10.1.24.19:6789/0 public_addrs: addrvec: - addr: 10.1.24.19:3300 nonce: 0 type: v2 - addr: 10.1.24.19:6789 nonce: 0 type: v1 rank: 1 - addr: 10.1.24.21:6789/0 name: e24-h21-740xd public_addr: 10.1.24.21:6789/0 public_addrs: addrvec: - addr: 10.1.24.21:3300 nonce: 0 type: v2 - addr: 10.1.24.21:6789 nonce: 0 type: v1 rank: 2 osdmap: osdmap: epoch: 1 full: false nearfull: false 
num_in_osds: 0 num_osds: 0 num_remapped_pgs: 0 num_up_osds: 0 pgmap: bytes_avail: 0 bytes_total: 0 bytes_used: 0 data_bytes: 0 num_objects: 0 num_pgs: 0 num_pools: 0 pgs_by_state: [] progress_events: {} quorum: - 0 - 1 - 2 quorum_age: 38 quorum_names: - e24-h17-740xd - e24-h19-740xd - e24-h21-740xd servicemap: epoch: 1 modified: '0.000000' services: {} ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ceph_current_status: election_epoch: 4 fsid: b6d61d41-5c6b-4c17-98be-f37217166318 fsmap: by_rank: [] epoch: 1 up:standby: 0 health: checks: {} status: HEALTH_OK mgrmap: active_addr: :/0 active_addrs: addrvec: [] active_change: '2019-04-17 11:25:45.308797' active_gid: 4284 active_name: e24-h17-740xd always_on_modules: nautilus: - balancer - crash - devicehealth - orchestrator_cli - progress - status - volumes available: false available_modules: - can_run: true error_string: '' module_options: password: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: password see_also: [] tags: [] type: str server_url: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: server_url see_also: [] tags: [] type: str username: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: username see_also: [] tags: [] type: str verify_server: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: verify_server see_also: [] tags: [] type: str name: ansible - can_run: true error_string: '' module_options: active: default_value: 'False' desc: automatically balance PGs across cluster enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: active see_also: [] tags: [] type: bool begin_time: default_value: '0000' desc: beginning time of day to automatically balance enum_allowed: [] flags: 1 level: advanced long_desc: This is a time of day in the format HHMM. max: '' min: '' name: begin_time see_also: [] tags: [] type: str begin_weekday: default_value: '0' desc: Restrict automatic balancing to this day of the week or later enum_allowed: [] flags: 1 level: advanced long_desc: 0 or 7 = Sunday, 1 = Monday, etc. max: '7' min: '0' name: begin_weekday see_also: [] tags: [] type: uint crush_compat_max_iterations: default_value: '25' desc: maximum number of iterations to attempt optimization enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '250' min: '1' name: crush_compat_max_iterations see_also: [] tags: [] type: uint crush_compat_metrics: default_value: pgs,objects,bytes desc: metrics with which to calculate OSD utilization enum_allowed: [] flags: 1 level: advanced long_desc: Value is a list of one or more of "pgs", "objects", or "bytes", and indicates which metrics to use to balance utilization. max: '' min: '' name: crush_compat_metrics see_also: [] tags: [] type: str crush_compat_step: default_value: '0.5' desc: aggressiveness of optimization enum_allowed: [] flags: 1 level: advanced long_desc: .99 is very aggressive, .01 is less aggressive max: '0.999' min: '0.001' name: crush_compat_step see_also: [] tags: [] type: float end_time: default_value: '2400' desc: ending time of day to automatically balance enum_allowed: [] flags: 1 level: advanced long_desc: This is a time of day in the format HHMM. 
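Note that in this mgrmap only iostat and restful appear under modules (explicitly enabled), while balancer, crash, devicehealth, orchestrator_cli, progress, status and volumes run as always_on_modules in Nautilus; everything else in available_modules is merely installable. The influx entry reports can_run: false because the influxdb Python binding is not present on the mgr host. Enabling one of the optional modules is a one-line operation; a generic example, not part of this playbook:

    # list modules and their enabled/available state
    ceph mgr module ls
    # enable an optional module, e.g. the prometheus exporter
    ceph mgr module enable prometheus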
max: '' min: '' name: end_time see_also: [] tags: [] type: str end_weekday: default_value: '7' desc: Restrict automatic balancing to days of the week earlier than this enum_allowed: [] flags: 1 level: advanced long_desc: 0 or 7 = Sunday, 1 = Monday, etc. max: '7' min: '0' name: end_weekday see_also: [] tags: [] type: uint min_score: default_value: '0' desc: minimum score, below which no optimization is attempted enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: min_score see_also: [] tags: [] type: float mode: default_value: none desc: Balancer mode enum_allowed: - crush-compat - none - upmap flags: 1 level: advanced long_desc: '' max: '' min: '' name: mode see_also: [] tags: [] type: str pool_ids: default_value: '' desc: pools which the automatic balancing will be limited to enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: pool_ids see_also: [] tags: [] type: str sleep_interval: default_value: '60' desc: how frequently to wake up and attempt optimization enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: sleep_interval see_also: [] tags: [] type: secs upmap_max_deviation: default_value: '0.01' desc: deviation below which no optimization is attempted enum_allowed: [] flags: 1 level: advanced long_desc: If the ratio between the fullest and least-full OSD is below this value then we stop trying to optimize placement. max: '1' min: '0' name: upmap_max_deviation see_also: [] tags: [] type: float upmap_max_iterations: default_value: '10' desc: maximum upmap optimization iterations enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: upmap_max_iterations see_also: [] tags: [] type: uint name: balancer - can_run: true error_string: '' module_options: {} name: crash - can_run: true error_string: '' module_options: salt_api_eauth: default_value: sharedsecret desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: salt_api_eauth see_also: [] tags: [] type: str salt_api_password: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: salt_api_password see_also: [] tags: [] type: str salt_api_url: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: salt_api_url see_also: [] tags: [] type: str salt_api_username: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: salt_api_username see_also: [] tags: [] type: str name: deepsea - can_run: true error_string: '' module_options: enable_monitoring: default_value: 'False' desc: monitor device health metrics enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: enable_monitoring see_also: [] tags: [] type: bool mark_out_threshold: default_value: '2419200' desc: automatically mark OSD if it may fail before this long enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: mark_out_threshold see_also: [] tags: [] type: secs pool_name: default_value: device_health_metrics desc: name of pool in which to store device health metrics enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: pool_name see_also: [] tags: [] type: str retention_period: default_value: '15552000' desc: how long to retain device health metrics enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: retention_period see_also: [] tags: [] type: secs scrape_frequency: default_value: '86400' desc: how frequently to 
scrape device health metrics enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: scrape_frequency see_also: [] tags: [] type: secs self_heal: default_value: 'True' desc: preemptively heal cluster around devices that may fail enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: self_heal see_also: [] tags: [] type: bool sleep_interval: default_value: '600' desc: how frequently to wake up and check device health enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: sleep_interval see_also: [] tags: [] type: secs warn_threshold: default_value: '7257600' desc: raise health warning if OSD may fail before this long enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: warn_threshold see_also: [] tags: [] type: secs name: devicehealth - can_run: false error_string: influxdb python module not found module_options: batch_size: default_value: '5000' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: batch_size see_also: [] tags: [] type: str database: default_value: ceph desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: database see_also: [] tags: [] type: str hostname: default_value: None desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: hostname see_also: [] tags: [] type: str interval: default_value: '30' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: interval see_also: [] tags: [] type: str password: default_value: None desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: password see_also: [] tags: [] type: str port: default_value: '8086' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: port see_also: [] tags: [] type: str ssl: default_value: 'false' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: ssl see_also: [] tags: [] type: str threads: default_value: '5' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: threads see_also: [] tags: [] type: str username: default_value: None desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: username see_also: [] tags: [] type: str verify_ssl: default_value: 'true' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: verify_ssl see_also: [] tags: [] type: str name: influx - can_run: true error_string: '' module_options: {} name: insights - can_run: true error_string: '' module_options: {} name: iostat - can_run: true error_string: '' module_options: failure_domain: default_value: host desc: failure domain for any created local pool enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: failure_domain see_also: [] tags: [] type: str min_size: default_value: '' desc: default min_size for any created local pool enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: min_size see_also: [] tags: [] type: int num_rep: default_value: '3' desc: default replica count for any created local pool enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: num_rep see_also: [] tags: [] type: int pg_num: default_value: '128' desc: default pg_num for any created local pool enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: pg_num see_also: [] tags: [] type: int prefix: default_value: '' desc: name prefix for any created local 
pool enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: prefix see_also: [] tags: [] type: str subtree: default_value: rack desc: CRUSH level for which to create a local pool enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: subtree see_also: [] tags: [] type: str name: localpool - can_run: true error_string: '' module_options: orchestrator: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: orchestrator see_also: [] tags: [] type: str name: orchestrator_cli - can_run: true error_string: '' module_options: sleep_interval: default_value: '60' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: sleep_interval see_also: [] tags: [] type: str name: pg_autoscaler - can_run: true error_string: '' module_options: max_completed_events: default_value: '50' desc: number of past completed events to remember enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: max_completed_events see_also: [] tags: [] type: int persist_interval: default_value: '5' desc: how frequently to persist completed events enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: persist_interval see_also: [] tags: [] type: secs name: progress - can_run: true error_string: '' module_options: rbd_stats_pools: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: rbd_stats_pools see_also: [] tags: [] type: str rbd_stats_pools_refresh_interval: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: rbd_stats_pools_refresh_interval see_also: [] tags: [] type: str scrape_interval: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: scrape_interval see_also: [] tags: [] type: str server_addr: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: server_addr see_also: [] tags: [] type: str server_port: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: server_port see_also: [] tags: [] type: str name: prometheus - can_run: true error_string: '' module_options: {} name: rbd_support - can_run: true error_string: '' module_options: key_file: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: key_file see_also: [] tags: [] type: str server_addr: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: server_addr see_also: [] tags: [] type: str server_port: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: server_port see_also: [] tags: [] type: str name: restful - can_run: true error_string: '' module_options: roption1: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: roption1 see_also: [] tags: [] type: str roption2: default_value: xyz desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: roption2 see_also: [] tags: [] type: str rwoption1: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: rwoption1 see_also: [] tags: [] type: str rwoption2: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: rwoption2 see_also: [] tags: [] type: int rwoption3: 
default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: rwoption3 see_also: [] tags: [] type: float rwoption4: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: rwoption4 see_also: [] tags: [] type: str rwoption5: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: rwoption5 see_also: [] tags: [] type: bool rwoption6: default_value: 'True' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: rwoption6 see_also: [] tags: [] type: bool testkey: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: testkey see_also: [] tags: [] type: str testlkey: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: testlkey see_also: [] tags: [] type: str testnewline: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: testnewline see_also: [] tags: [] type: str name: selftest - can_run: true error_string: '' module_options: {} name: status - can_run: true error_string: '' module_options: address: default_value: unixgram:///tmp/telegraf.sock desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: address see_also: [] tags: [] type: str interval: default_value: '15' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: interval see_also: [] tags: [] type: secs name: telegraf - can_run: true error_string: '' module_options: contact: default_value: None desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: contact see_also: [] tags: [] type: str description: default_value: None desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: description see_also: [] tags: [] type: str enabled: default_value: 'False' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: enabled see_also: [] tags: [] type: bool interval: default_value: '72' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '24' name: interval see_also: [] tags: [] type: int leaderboard: default_value: 'False' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: leaderboard see_also: [] tags: [] type: bool organization: default_value: None desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: organization see_also: [] tags: [] type: str proxy: default_value: None desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: proxy see_also: [] tags: [] type: str url: default_value: https://telemetry.ceph.com/report desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: url see_also: [] tags: [] type: str name: telemetry - can_run: true error_string: '' module_options: {} name: test_orchestrator - can_run: true error_string: '' module_options: {} name: volumes - can_run: true error_string: '' module_options: identifier: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: identifier see_also: [] tags: [] type: str interval: default_value: '60' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: interval see_also: [] tags: [] type: secs zabbix_host: default_value: None desc: '' enum_allowed: [] flags: 0 level: advanced 
long_desc: '' max: '' min: '' name: zabbix_host see_also: [] tags: [] type: str zabbix_port: default_value: '10051' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: zabbix_port see_also: [] tags: [] type: int zabbix_sender: default_value: /usr/bin/zabbix_sender desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: zabbix_sender see_also: [] tags: [] type: str name: zabbix epoch: 2 modules: - iostat - restful services: {} standbys: [] monmap: created: '2019-04-17 11:25:04.256284' epoch: 1 features: optional: [] persistent: - kraken - luminous - mimic - osdmap-prune - nautilus fsid: b6d61d41-5c6b-4c17-98be-f37217166318 min_mon_release: 14 min_mon_release_name: nautilus modified: '2019-04-17 11:25:04.256284' mons: - addr: 10.1.24.17:6789/0 name: e24-h17-740xd public_addr: 10.1.24.17:6789/0 public_addrs: addrvec: - addr: 10.1.24.17:3300 nonce: 0 type: v2 - addr: 10.1.24.17:6789 nonce: 0 type: v1 rank: 0 - addr: 10.1.24.19:6789/0 name: e24-h19-740xd public_addr: 10.1.24.19:6789/0 public_addrs: addrvec: - addr: 10.1.24.19:3300 nonce: 0 type: v2 - addr: 10.1.24.19:6789 nonce: 0 type: v1 rank: 1 - addr: 10.1.24.21:6789/0 name: e24-h21-740xd public_addr: 10.1.24.21:6789/0 public_addrs: addrvec: - addr: 10.1.24.21:3300 nonce: 0 type: v2 - addr: 10.1.24.21:6789 nonce: 0 type: v1 rank: 2 osdmap: osdmap: epoch: 1 full: false nearfull: false num_in_osds: 0 num_osds: 0 num_remapped_pgs: 0 num_up_osds: 0 pgmap: bytes_avail: 0 bytes_total: 0 bytes_used: 0 data_bytes: 0 num_objects: 0 num_pgs: 0 num_pools: 0 pgs_by_state: [] progress_events: {} quorum: - 0 - 1 - 2 quorum_age: 38 quorum_names: - e24-h17-740xd - e24-h19-740xd - e24-h21-740xd servicemap: epoch: 1 modified: '0.000000' services: {} ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ceph_current_status: election_epoch: 4 fsid: b6d61d41-5c6b-4c17-98be-f37217166318 fsmap: by_rank: [] epoch: 1 up:standby: 0 health: checks: {} status: HEALTH_OK mgrmap: active_addr: :/0 active_addrs: addrvec: [] active_change: '2019-04-17 11:25:45.308797' active_gid: 4284 active_name: e24-h17-740xd always_on_modules: nautilus: - balancer - crash - devicehealth - orchestrator_cli - progress - status - volumes available: false available_modules: - can_run: true error_string: '' module_options: password: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: password see_also: [] tags: [] type: str server_url: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: server_url see_also: [] tags: [] type: str username: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: username see_also: [] tags: [] type: str verify_server: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: verify_server see_also: [] tags: [] type: str name: ansible - can_run: true error_string: '' module_options: active: default_value: 'False' desc: automatically balance PGs across cluster enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: active see_also: [] tags: [] type: bool begin_time: default_value: '0000' desc: beginning time of day to automatically balance enum_allowed: [] flags: 1 level: advanced long_desc: This is a time of day in the format HHMM. 
max: '' min: '' name: begin_time see_also: [] tags: [] type: str begin_weekday: default_value: '0' desc: Restrict automatic balancing to this day of the week or later enum_allowed: [] flags: 1 level: advanced long_desc: 0 or 7 = Sunday, 1 = Monday, etc. max: '7' min: '0' name: begin_weekday see_also: [] tags: [] type: uint crush_compat_max_iterations: default_value: '25' desc: maximum number of iterations to attempt optimization enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '250' min: '1' name: crush_compat_max_iterations see_also: [] tags: [] type: uint crush_compat_metrics: default_value: pgs,objects,bytes desc: metrics with which to calculate OSD utilization enum_allowed: [] flags: 1 level: advanced long_desc: Value is a list of one or more of "pgs", "objects", or "bytes", and indicates which metrics to use to balance utilization. max: '' min: '' name: crush_compat_metrics see_also: [] tags: [] type: str crush_compat_step: default_value: '0.5' desc: aggressiveness of optimization enum_allowed: [] flags: 1 level: advanced long_desc: .99 is very aggressive, .01 is less aggressive max: '0.999' min: '0.001' name: crush_compat_step see_also: [] tags: [] type: float end_time: default_value: '2400' desc: ending time of day to automatically balance enum_allowed: [] flags: 1 level: advanced long_desc: This is a time of day in the format HHMM. max: '' min: '' name: end_time see_also: [] tags: [] type: str end_weekday: default_value: '7' desc: Restrict automatic balancing to days of the week earlier than this enum_allowed: [] flags: 1 level: advanced long_desc: 0 or 7 = Sunday, 1 = Monday, etc. max: '7' min: '0' name: end_weekday see_also: [] tags: [] type: uint min_score: default_value: '0' desc: minimum score, below which no optimization is attempted enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: min_score see_also: [] tags: [] type: float mode: default_value: none desc: Balancer mode enum_allowed: - crush-compat - none - upmap flags: 1 level: advanced long_desc: '' max: '' min: '' name: mode see_also: [] tags: [] type: str pool_ids: default_value: '' desc: pools which the automatic balancing will be limited to enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: pool_ids see_also: [] tags: [] type: str sleep_interval: default_value: '60' desc: how frequently to wake up and attempt optimization enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: sleep_interval see_also: [] tags: [] type: secs upmap_max_deviation: default_value: '0.01' desc: deviation below which no optimization is attempted enum_allowed: [] flags: 1 level: advanced long_desc: If the ratio between the fullest and least-full OSD is below this value then we stop trying to optimize placement. 
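One practical note on the upmap mode listed in the balancer's enum_allowed values above: pg-upmap entries require that every client speaks Luminous or newer, so the minimum client compatibility has to be raised before that mode can be selected. The standard one-time, cluster-wide command is:

    # allow pg-upmap entries; required before "ceph balancer mode upmap" is accepted
    ceph osd set-require-min-compat-client luminous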
max: '1' min: '0' name: upmap_max_deviation see_also: [] tags: [] type: float upmap_max_iterations: default_value: '10' desc: maximum upmap optimization iterations enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: upmap_max_iterations see_also: [] tags: [] type: uint name: balancer - can_run: true error_string: '' module_options: {} name: crash - can_run: true error_string: '' module_options: salt_api_eauth: default_value: sharedsecret desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: salt_api_eauth see_also: [] tags: [] type: str salt_api_password: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: salt_api_password see_also: [] tags: [] type: str salt_api_url: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: salt_api_url see_also: [] tags: [] type: str salt_api_username: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: salt_api_username see_also: [] tags: [] type: str name: deepsea - can_run: true error_string: '' module_options: enable_monitoring: default_value: 'False' desc: monitor device health metrics enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: enable_monitoring see_also: [] tags: [] type: bool mark_out_threshold: default_value: '2419200' desc: automatically mark OSD if it may fail before this long enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: mark_out_threshold see_also: [] tags: [] type: secs pool_name: default_value: device_health_metrics desc: name of pool in which to store device health metrics enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: pool_name see_also: [] tags: [] type: str retention_period: default_value: '15552000' desc: how long to retain device health metrics enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: retention_period see_also: [] tags: [] type: secs scrape_frequency: default_value: '86400' desc: how frequently to scrape device health metrics enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: scrape_frequency see_also: [] tags: [] type: secs self_heal: default_value: 'True' desc: preemptively heal cluster around devices that may fail enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: self_heal see_also: [] tags: [] type: bool sleep_interval: default_value: '600' desc: how frequently to wake up and check device health enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: sleep_interval see_also: [] tags: [] type: secs warn_threshold: default_value: '7257600' desc: raise health warning if OSD may fail before this long enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: warn_threshold see_also: [] tags: [] type: secs name: devicehealth - can_run: false error_string: influxdb python module not found module_options: batch_size: default_value: '5000' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: batch_size see_also: [] tags: [] type: str database: default_value: ceph desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: database see_also: [] tags: [] type: str hostname: default_value: None desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: hostname see_also: [] tags: [] type: str interval: default_value: 
'30' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: interval see_also: [] tags: [] type: str password: default_value: None desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: password see_also: [] tags: [] type: str port: default_value: '8086' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: port see_also: [] tags: [] type: str ssl: default_value: 'false' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: ssl see_also: [] tags: [] type: str threads: default_value: '5' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: threads see_also: [] tags: [] type: str username: default_value: None desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: username see_also: [] tags: [] type: str verify_ssl: default_value: 'true' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: verify_ssl see_also: [] tags: [] type: str name: influx - can_run: true error_string: '' module_options: {} name: insights - can_run: true error_string: '' module_options: {} name: iostat - can_run: true error_string: '' module_options: failure_domain: default_value: host desc: failure domain for any created local pool enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: failure_domain see_also: [] tags: [] type: str min_size: default_value: '' desc: default min_size for any created local pool enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: min_size see_also: [] tags: [] type: int num_rep: default_value: '3' desc: default replica count for any created local pool enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: num_rep see_also: [] tags: [] type: int pg_num: default_value: '128' desc: default pg_num for any created local pool enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: pg_num see_also: [] tags: [] type: int prefix: default_value: '' desc: name prefix for any created local pool enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: prefix see_also: [] tags: [] type: str subtree: default_value: rack desc: CRUSH level for which to create a local pool enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: subtree see_also: [] tags: [] type: str name: localpool - can_run: true error_string: '' module_options: orchestrator: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: orchestrator see_also: [] tags: [] type: str name: orchestrator_cli - can_run: true error_string: '' module_options: sleep_interval: default_value: '60' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: sleep_interval see_also: [] tags: [] type: str name: pg_autoscaler - can_run: true error_string: '' module_options: max_completed_events: default_value: '50' desc: number of past completed events to remember enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: max_completed_events see_also: [] tags: [] type: int persist_interval: default_value: '5' desc: how frequently to persist completed events enum_allowed: [] flags: 1 level: advanced long_desc: '' max: '' min: '' name: persist_interval see_also: [] tags: [] type: secs name: progress - can_run: true error_string: '' module_options: rbd_stats_pools: default_value: '' desc: '' enum_allowed: [] 
flags: 0 level: advanced long_desc: '' max: '' min: '' name: rbd_stats_pools see_also: [] tags: [] type: str rbd_stats_pools_refresh_interval: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: rbd_stats_pools_refresh_interval see_also: [] tags: [] type: str scrape_interval: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: scrape_interval see_also: [] tags: [] type: str server_addr: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: server_addr see_also: [] tags: [] type: str server_port: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: server_port see_also: [] tags: [] type: str name: prometheus - can_run: true error_string: '' module_options: {} name: rbd_support - can_run: true error_string: '' module_options: key_file: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: key_file see_also: [] tags: [] type: str server_addr: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: server_addr see_also: [] tags: [] type: str server_port: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: server_port see_also: [] tags: [] type: str name: restful - can_run: true error_string: '' module_options: roption1: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: roption1 see_also: [] tags: [] type: str roption2: default_value: xyz desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: roption2 see_also: [] tags: [] type: str rwoption1: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: rwoption1 see_also: [] tags: [] type: str rwoption2: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: rwoption2 see_also: [] tags: [] type: int rwoption3: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: rwoption3 see_also: [] tags: [] type: float rwoption4: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: rwoption4 see_also: [] tags: [] type: str rwoption5: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: rwoption5 see_also: [] tags: [] type: bool rwoption6: default_value: 'True' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: rwoption6 see_also: [] tags: [] type: bool testkey: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: testkey see_also: [] tags: [] type: str testlkey: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: testlkey see_also: [] tags: [] type: str testnewline: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: testnewline see_also: [] tags: [] type: str name: selftest - can_run: true error_string: '' module_options: {} name: status - can_run: true error_string: '' module_options: address: default_value: unixgram:///tmp/telegraf.sock desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: address see_also: [] tags: [] type: str interval: 
default_value: '15' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: interval see_also: [] tags: [] type: secs name: telegraf - can_run: true error_string: '' module_options: contact: default_value: None desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: contact see_also: [] tags: [] type: str description: default_value: None desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: description see_also: [] tags: [] type: str enabled: default_value: 'False' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: enabled see_also: [] tags: [] type: bool interval: default_value: '72' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '24' name: interval see_also: [] tags: [] type: int leaderboard: default_value: 'False' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: leaderboard see_also: [] tags: [] type: bool organization: default_value: None desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: organization see_also: [] tags: [] type: str proxy: default_value: None desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: proxy see_also: [] tags: [] type: str url: default_value: https://telemetry.ceph.com/report desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: url see_also: [] tags: [] type: str name: telemetry - can_run: true error_string: '' module_options: {} name: test_orchestrator - can_run: true error_string: '' module_options: {} name: volumes - can_run: true error_string: '' module_options: identifier: default_value: '' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: identifier see_also: [] tags: [] type: str interval: default_value: '60' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: interval see_also: [] tags: [] type: secs zabbix_host: default_value: None desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: zabbix_host see_also: [] tags: [] type: str zabbix_port: default_value: '10051' desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: zabbix_port see_also: [] tags: [] type: int zabbix_sender: default_value: /usr/bin/zabbix_sender desc: '' enum_allowed: [] flags: 0 level: advanced long_desc: '' max: '' min: '' name: zabbix_sender see_also: [] tags: [] type: str name: zabbix epoch: 2 modules: - iostat - restful services: {} standbys: [] monmap: created: '2019-04-17 11:25:04.256284' epoch: 1 features: optional: [] persistent: - kraken - luminous - mimic - osdmap-prune - nautilus fsid: b6d61d41-5c6b-4c17-98be-f37217166318 min_mon_release: 14 min_mon_release_name: nautilus modified: '2019-04-17 11:25:04.256284' mons: - addr: 10.1.24.17:6789/0 name: e24-h17-740xd public_addr: 10.1.24.17:6789/0 public_addrs: addrvec: - addr: 10.1.24.17:3300 nonce: 0 type: v2 - addr: 10.1.24.17:6789 nonce: 0 type: v1 rank: 0 - addr: 10.1.24.19:6789/0 name: e24-h19-740xd public_addr: 10.1.24.19:6789/0 public_addrs: addrvec: - addr: 10.1.24.19:3300 nonce: 0 type: v2 - addr: 10.1.24.19:6789 nonce: 0 type: v1 rank: 1 - addr: 10.1.24.21:6789/0 name: e24-h21-740xd public_addr: 10.1.24.21:6789/0 public_addrs: addrvec: - addr: 10.1.24.21:3300 nonce: 0 type: v2 - addr: 10.1.24.21:6789 nonce: 0 type: v1 rank: 2 osdmap: osdmap: epoch: 1 full: false nearfull: false 
num_in_osds: 0 num_osds: 0 num_remapped_pgs: 0 num_up_osds: 0 pgmap: bytes_avail: 0 bytes_total: 0 bytes_used: 0 data_bytes: 0 num_objects: 0 num_pgs: 0 num_pools: 0 pgs_by_state: [] progress_events: {} quorum: - 0 - 1 - 2 quorum_age: 38 quorum_names: - e24-h17-740xd - e24-h19-740xd - e24-h21-740xd servicemap: epoch: 1 modified: '0.000000' services: {} TASK [ceph-facts : set_fact fsid from ceph_current_status] ************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:91 Wednesday 17 April 2019 11:25:47 +0000 (0:00:00.370) 0:02:13.466 ******* ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: fsid: b6d61d41-5c6b-4c17-98be-f37217166318 ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: fsid: b6d61d41-5c6b-4c17-98be-f37217166318 ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: fsid: b6d61d41-5c6b-4c17-98be-f37217166318 ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: fsid: b6d61d41-5c6b-4c17-98be-f37217166318 TASK [ceph-facts : generate cluster fsid] ******************************************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:98 Wednesday 17 April 2019 11:25:47 +0000 (0:00:00.131) 0:02:13.598 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : set_fact fsid] *************************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:105 Wednesday 17 April 2019 11:25:47 +0000 (0:00:00.038) 0:02:13.636 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : set_fact mds_name ansible_hostname] ****************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:113 Wednesday 17 April 2019 11:25:47 +0000 (0:00:00.110) 0:02:13.747 ******* ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: mds_name: e23-h05-740xd ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: mds_name: e24-h05-740xd ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: mds_name: e24-h07-740xd ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: mds_name: e23-h07-740xd TASK [ceph-facts : set_fact mds_name ansible_fqdn] 
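The two fsid tasks above behave as expected for a cluster that is already running: "set_fact fsid from ceph_current_status" copies the live cluster's fsid onto every host, and "generate cluster fsid" is skipped since an fsid was already found. The stored value can be cross-checked on any node, and on a genuinely fresh deployment the role would mint a new UUID instead; a minimal illustration of both, assuming the default cluster name ceph:

    # confirm the fsid the role just stored as a fact
    ceph --cluster ceph fsid
    # what a freshly generated fsid would look like on a brand-new cluster
    uuidgen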
********************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:119 Wednesday 17 April 2019 11:25:47 +0000 (0:00:00.128) 0:02:13.875 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : set_fact rbd_client_directory_owner ceph] ************************************************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:125 Wednesday 17 April 2019 11:25:47 +0000 (0:00:00.115) 0:02:13.990 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : set_fact rbd_client_directory_group rbd_client_directory_group] ************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:132 Wednesday 17 April 2019 11:25:47 +0000 (0:00:00.111) 0:02:14.102 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : set_fact rbd_client_directory_mode 0770] ************************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:139 Wednesday 17 April 2019 11:25:47 +0000 (0:00:00.109) 0:02:14.211 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : resolve device link(s)] 
****************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:146 Wednesday 17 April 2019 11:25:47 +0000 (0:00:00.111) 0:02:14.323 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme0n1) => changed=false item: /dev/nvme0n1 skip_reason: Conditional result was False skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme1n1) => changed=false item: /dev/nvme1n1 skip_reason: Conditional result was False skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme2n1) => changed=false item: /dev/nvme2n1 skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme0n1) => changed=false item: /dev/nvme0n1 skip_reason: Conditional result was False skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme3n1) => changed=false item: /dev/nvme3n1 skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme1n1) => changed=false item: /dev/nvme1n1 skip_reason: Conditional result was False skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme4n1) => changed=false item: /dev/nvme4n1 skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme2n1) => changed=false item: /dev/nvme2n1 skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme3n1) => changed=false item: /dev/nvme3n1 skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme0n1) => changed=false item: /dev/nvme0n1 skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme4n1) => changed=false item: /dev/nvme4n1 skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme1n1) => changed=false item: /dev/nvme1n1 skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme2n1) => changed=false item: /dev/nvme2n1 skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme3n1) => changed=false item: /dev/nvme3n1 skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme0n1) => changed=false item: /dev/nvme0n1 skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme1n1) => changed=false item: /dev/nvme1n1 skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme4n1) => changed=false item: /dev/nvme4n1 skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme2n1) => changed=false item: /dev/nvme2n1 skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme3n1) => changed=false item: /dev/nvme3n1 skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme4n1) => changed=false item: /dev/nvme4n1 skip_reason: Conditional result was False TASK [ceph-facts : set_fact build devices from resolved 
symlinks] ******************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:157 Wednesday 17 April 2019 11:25:48 +0000 (0:00:00.162) 0:02:14.486 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', '_ansible_no_log': False, 'item': '/dev/nvme0n1', '_ansible_item_result': True, '_ansible_ignore_errors': None, '_ansible_item_label': '/dev/nvme0n1'}) => changed=false item: changed: false item: /dev/nvme0n1 skip_reason: Conditional result was False skipped: true skip_reason: Conditional result was False skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', '_ansible_no_log': False, 'item': '/dev/nvme1n1', '_ansible_item_result': True, '_ansible_ignore_errors': None, '_ansible_item_label': '/dev/nvme1n1'}) => changed=false item: changed: false item: /dev/nvme1n1 skip_reason: Conditional result was False skipped: true skip_reason: Conditional result was False skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', '_ansible_no_log': False, 'item': '/dev/nvme2n1', '_ansible_item_result': True, '_ansible_ignore_errors': None, '_ansible_item_label': '/dev/nvme2n1'}) => changed=false item: changed: false item: /dev/nvme2n1 skip_reason: Conditional result was False skipped: true skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', '_ansible_no_log': False, 'item': '/dev/nvme0n1', '_ansible_item_result': True, '_ansible_ignore_errors': None, '_ansible_item_label': '/dev/nvme0n1'}) => changed=false item: changed: false item: /dev/nvme0n1 skip_reason: Conditional result was False skipped: true skip_reason: Conditional result was False skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', '_ansible_no_log': False, 'item': '/dev/nvme3n1', '_ansible_item_result': True, '_ansible_ignore_errors': None, '_ansible_item_label': '/dev/nvme3n1'}) => changed=false item: changed: false item: /dev/nvme3n1 skip_reason: Conditional result was False skipped: true skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', '_ansible_no_log': False, 'item': '/dev/nvme1n1', '_ansible_item_result': True, '_ansible_ignore_errors': None, '_ansible_item_label': '/dev/nvme1n1'}) => changed=false item: changed: false item: /dev/nvme1n1 skip_reason: Conditional result was False skipped: true skip_reason: Conditional result was False skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', '_ansible_no_log': False, 'item': '/dev/nvme4n1', '_ansible_item_result': True, '_ansible_ignore_errors': None, '_ansible_item_label': '/dev/nvme4n1'}) => changed=false item: changed: false item: /dev/nvme4n1 skip_reason: Conditional result was False skipped: true skip_reason: Conditional result was False skipping: 
[e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', '_ansible_no_log': False, 'item': '/dev/nvme2n1', '_ansible_item_result': True, '_ansible_ignore_errors': None, '_ansible_item_label': '/dev/nvme2n1'}) => changed=false item: changed: false item: /dev/nvme2n1 skip_reason: Conditional result was False skipped: true skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', '_ansible_no_log': False, 'item': '/dev/nvme0n1', '_ansible_item_result': True, '_ansible_ignore_errors': None, '_ansible_item_label': '/dev/nvme0n1'}) => changed=false item: changed: false item: /dev/nvme0n1 skip_reason: Conditional result was False skipped: true skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', '_ansible_no_log': False, 'item': '/dev/nvme3n1', '_ansible_item_result': True, '_ansible_ignore_errors': None, '_ansible_item_label': '/dev/nvme3n1'}) => changed=false item: changed: false item: /dev/nvme3n1 skip_reason: Conditional result was False skipped: true skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', '_ansible_no_log': False, 'item': '/dev/nvme1n1', '_ansible_item_result': True, '_ansible_ignore_errors': None, '_ansible_item_label': '/dev/nvme1n1'}) => changed=false item: changed: false item: /dev/nvme1n1 skip_reason: Conditional result was False skipped: true skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', '_ansible_no_log': False, 'item': '/dev/nvme4n1', '_ansible_item_result': True, '_ansible_ignore_errors': None, '_ansible_item_label': '/dev/nvme4n1'}) => changed=false item: changed: false item: /dev/nvme4n1 skip_reason: Conditional result was False skipped: true skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', '_ansible_no_log': False, 'item': '/dev/nvme2n1', '_ansible_item_result': True, '_ansible_ignore_errors': None, '_ansible_item_label': '/dev/nvme2n1'}) => changed=false item: changed: false item: /dev/nvme2n1 skip_reason: Conditional result was False skipped: true skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', '_ansible_no_log': False, 'item': '/dev/nvme0n1', '_ansible_item_result': True, '_ansible_ignore_errors': None, '_ansible_item_label': '/dev/nvme0n1'}) => changed=false item: changed: false item: /dev/nvme0n1 skip_reason: Conditional result was False skipped: true skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', '_ansible_no_log': False, 'item': '/dev/nvme3n1', '_ansible_item_result': True, '_ansible_ignore_errors': None, '_ansible_item_label': '/dev/nvme3n1'}) => changed=false item: changed: false item: /dev/nvme3n1 skip_reason: 
Conditional result was False skipped: true skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', '_ansible_no_log': False, 'item': '/dev/nvme4n1', '_ansible_item_result': True, '_ansible_ignore_errors': None, '_ansible_item_label': '/dev/nvme4n1'}) => changed=false item: changed: false item: /dev/nvme4n1 skip_reason: Conditional result was False skipped: true skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', '_ansible_no_log': False, 'item': '/dev/nvme1n1', '_ansible_item_result': True, '_ansible_ignore_errors': None, '_ansible_item_label': '/dev/nvme1n1'}) => changed=false item: changed: false item: /dev/nvme1n1 skip_reason: Conditional result was False skipped: true skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', '_ansible_no_log': False, 'item': '/dev/nvme2n1', '_ansible_item_result': True, '_ansible_ignore_errors': None, '_ansible_item_label': '/dev/nvme2n1'}) => changed=false item: changed: false item: /dev/nvme2n1 skip_reason: Conditional result was False skipped: true skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', '_ansible_no_log': False, 'item': '/dev/nvme3n1', '_ansible_item_result': True, '_ansible_ignore_errors': None, '_ansible_item_label': '/dev/nvme3n1'}) => changed=false item: changed: false item: /dev/nvme3n1 skip_reason: Conditional result was False skipped: true skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', '_ansible_no_log': False, 'item': '/dev/nvme4n1', '_ansible_item_result': True, '_ansible_ignore_errors': None, '_ansible_item_label': '/dev/nvme4n1'}) => changed=false item: changed: false item: /dev/nvme4n1 skip_reason: Conditional result was False skipped: true skip_reason: Conditional result was False TASK [ceph-facts : set_fact build final devices list] ******************************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:167 Wednesday 17 April 2019 11:25:48 +0000 (0:00:00.168) 0:02:14.654 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : set_fact devices generate device list when osd_auto_discovery] *************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:176 Wednesday 
17 April 2019 11:25:48 +0000 (0:00:00.114) 0:02:14.769 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'dm-1', 'value': {'virtual': 1, 'links': {'ids': ['dm-name-rhel_e23--h05--740xd-swap', 'dm-uuid-LVM-KTm1KdKUKOHDkJWzRCj3Q0v0SM3mE9Fa83be0foapGdZcNHwF978WCMHOQk80SKs'], 'uuids': ['03ff6531-787c-4726-9a1b-512865e75946'], 'labels': [], 'masters': []}, 'vendor': None, 'model': None, 'sas_address': None, 'sas_device_handle': None, 'serial': '00ae8b83b86e5aeb2200f23a59604609', 'removable': '0', 'support_discard': '0', 'partitions': {}, 'rotational': '1', 'scheduler_mode': '', 'sectors': '8388608', 'sectorsize': '512', 'size': '4.00 GB', 'host': '', 'holders': []}}) => changed=false item: key: dm-1 value: holders: [] host: '' links: ids: - dm-name-rhel_e23--h05--740xd-swap - dm-uuid-LVM-KTm1KdKUKOHDkJWzRCj3Q0v0SM3mE9Fa83be0foapGdZcNHwF978WCMHOQk80SKs labels: [] masters: [] uuids: - 03ff6531-787c-4726-9a1b-512865e75946 model: null partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: '' sectors: '8388608' sectorsize: '512' serial: 00ae8b83b86e5aeb2200f23a59604609 size: 4.00 GB support_discard: '0' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'nvme0n1', 'value': {'virtual': 1, 'links': {'ids': ['nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200360', 'nvme-eui.333959304b2003600025385800000002'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': None, 'model': 'Dell Express Flash PM1725a 800GB SFF', 'sas_address': None, 'sas_device_handle': None, 'removable': '0', 'support_discard': '512', 'partitions': {}, 'rotational': '0', 'scheduler_mode': 'none', 'sectors': '1562824368', 'sectorsize': '512', 'size': '745.21 GB', 'host': 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)', 'holders': []}}) => changed=false item: key: nvme0n1 value: holders: [] host: 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)' links: ids: - nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200360 - nvme-eui.333959304b2003600025385800000002 labels: [] masters: [] uuids: [] model: Dell Express Flash PM1725a 800GB SFF partitions: {} removable: '0' rotational: '0' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '1562824368' sectorsize: '512' size: 745.21 GB support_discard: '512' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'nvme3n1', 'value': {'virtual': 1, 'links': {'ids': ['nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200362', 'nvme-eui.333959304b2003620025385800000002'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': None, 'model': 'Dell Express Flash PM1725a 800GB SFF', 'sas_address': None, 'sas_device_handle': None, 'removable': '0', 'support_discard': '512', 'partitions': {}, 'rotational': '0', 'scheduler_mode': 'none', 'sectors': '1562824368', 'sectorsize': '512', 'size': '745.21 GB', 'host': 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)', 'holders': []}}) => changed=false item: key: nvme3n1 value: holders: [] host: 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)' links: ids: - nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200362 - nvme-eui.333959304b2003620025385800000002 labels: [] masters: [] 
uuids: [] model: Dell Express Flash PM1725a 800GB SFF partitions: {} removable: '0' rotational: '0' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '1562824368' sectorsize: '512' size: 745.21 GB support_discard: '512' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'nvme2n1', 'value': {'virtual': 1, 'links': {'ids': ['nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200282', 'nvme-eui.333959304b2002820025385800000002'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': None, 'model': 'Dell Express Flash PM1725a 800GB SFF', 'sas_address': None, 'sas_device_handle': None, 'removable': '0', 'support_discard': '512', 'partitions': {}, 'rotational': '0', 'scheduler_mode': 'none', 'sectors': '1562824368', 'sectorsize': '512', 'size': '745.21 GB', 'host': 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)', 'holders': []}}) => changed=false item: key: nvme2n1 value: holders: [] host: 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)' links: ids: - nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200282 - nvme-eui.333959304b2002820025385800000002 labels: [] masters: [] uuids: [] model: Dell Express Flash PM1725a 800GB SFF partitions: {} removable: '0' rotational: '0' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '1562824368' sectorsize: '512' size: 745.21 GB support_discard: '512' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'dm-2', 'value': {'virtual': 1, 'links': {'ids': ['dm-name-rhel_e23--h05--740xd-home', 'dm-uuid-LVM-KTm1KdKUKOHDkJWzRCj3Q0v0SM3mE9Fa5PK1Zq33SMazG3hggYXzKwiw9BjHuWgL'], 'uuids': ['362124e7-e989-488e-8b01-3daba39b8369'], 'labels': [], 'masters': []}, 'vendor': None, 'model': None, 'sas_address': None, 'sas_device_handle': None, 'serial': '00ae8b83b86e5aeb2200f23a59604609', 'removable': '0', 'support_discard': '0', 'partitions': {}, 'rotational': '1', 'scheduler_mode': '', 'sectors': '821288960', 'sectorsize': '512', 'size': '391.62 GB', 'host': '', 'holders': []}}) => changed=false item: key: dm-2 value: holders: [] host: '' links: ids: - dm-name-rhel_e23--h05--740xd-home - dm-uuid-LVM-KTm1KdKUKOHDkJWzRCj3Q0v0SM3mE9Fa5PK1Zq33SMazG3hggYXzKwiw9BjHuWgL labels: [] masters: [] uuids: - 362124e7-e989-488e-8b01-3daba39b8369 model: null partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: '' sectors: '821288960' sectorsize: '512' serial: 00ae8b83b86e5aeb2200f23a59604609 size: 391.62 GB support_discard: '0' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'dm-0', 'value': {'virtual': 1, 'links': {'ids': ['dm-name-rhel_e23--h05--740xd-root', 'dm-uuid-LVM-KTm1KdKUKOHDkJWzRCj3Q0v0SM3mE9FaKvat9iUehrITIvmT0PHS5t1YPy4KOMpe'], 'uuids': ['79f2e03c-d36f-4635-b497-3c2ae0301d40'], 'labels': [], 'masters': []}, 'vendor': None, 'model': None, 'sas_address': None, 'sas_device_handle': None, 'serial': '00ae8b83b86e5aeb2200f23a59604609', 'removable': '0', 'support_discard': '0', 'partitions': {}, 'rotational': '1', 'scheduler_mode': '', 'sectors': '104857600', 'sectorsize': '512', 'size': '50.00 GB', 'host': '', 'holders': []}}) => changed=false item: key: dm-0 value: holders: [] host: '' links: ids: - 
dm-name-rhel_e23--h05--740xd-root - dm-uuid-LVM-KTm1KdKUKOHDkJWzRCj3Q0v0SM3mE9FaKvat9iUehrITIvmT0PHS5t1YPy4KOMpe labels: [] masters: [] uuids: - 79f2e03c-d36f-4635-b497-3c2ae0301d40 model: null partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: '' sectors: '104857600' sectorsize: '512' serial: 00ae8b83b86e5aeb2200f23a59604609 size: 50.00 GB support_discard: '0' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'dm-1', 'value': {'virtual': 1, 'links': {'ids': ['dm-name-rhel_e24--h05--740xd-swap', 'dm-uuid-LVM-K0UgaB7rYJjfeF0StounUfKCI3U2RGcTQmdGo1ldOz1ygNvi86HxLU6fHmjNLRe6'], 'uuids': ['1e952bda-f45d-4bfa-81aa-3ae675448a73'], 'labels': [], 'masters': []}, 'vendor': None, 'model': None, 'sas_address': None, 'sas_device_handle': None, 'serial': '00a102440ca75eeb2200df3a59604609', 'removable': '0', 'support_discard': '0', 'partitions': {}, 'rotational': '1', 'scheduler_mode': '', 'sectors': '8388608', 'sectorsize': '512', 'size': '4.00 GB', 'host': '', 'holders': []}}) => changed=false item: key: dm-1 value: holders: [] host: '' links: ids: - dm-name-rhel_e24--h05--740xd-swap - dm-uuid-LVM-K0UgaB7rYJjfeF0StounUfKCI3U2RGcTQmdGo1ldOz1ygNvi86HxLU6fHmjNLRe6 labels: [] masters: [] uuids: - 1e952bda-f45d-4bfa-81aa-3ae675448a73 model: null partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: '' sectors: '8388608' sectorsize: '512' serial: 00a102440ca75eeb2200df3a59604609 size: 4.00 GB support_discard: '0' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'nvme1n1', 'value': {'virtual': 1, 'links': {'ids': ['nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200364', 'nvme-eui.333959304b2003640025385800000002'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': None, 'model': 'Dell Express Flash PM1725a 800GB SFF', 'sas_address': None, 'sas_device_handle': None, 'removable': '0', 'support_discard': '512', 'partitions': {}, 'rotational': '0', 'scheduler_mode': 'none', 'sectors': '1562824368', 'sectorsize': '512', 'size': '745.21 GB', 'host': 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)', 'holders': []}}) => changed=false item: key: nvme1n1 value: holders: [] host: 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)' links: ids: - nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200364 - nvme-eui.333959304b2003640025385800000002 labels: [] masters: [] uuids: [] model: Dell Express Flash PM1725a 800GB SFF partitions: {} removable: '0' rotational: '0' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '1562824368' sectorsize: '512' size: 745.21 GB support_discard: '512' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'nvme0n1', 'value': {'virtual': 1, 'links': {'ids': ['nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200359', 'nvme-eui.333959304b2003590025385800000002'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': None, 'model': 'Dell Express Flash PM1725a 800GB SFF', 'sas_address': None, 'sas_device_handle': None, 'removable': '0', 'support_discard': '512', 'partitions': {}, 'rotational': '0', 'scheduler_mode': 'none', 'sectors': '1562824368', 'sectorsize': '512', 'size': '745.21 GB', 'host': 
'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)', 'holders': []}}) => changed=false item: key: nvme0n1 value: holders: [] host: 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)' links: ids: - nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200359 - nvme-eui.333959304b2003590025385800000002 labels: [] masters: [] uuids: [] model: Dell Express Flash PM1725a 800GB SFF partitions: {} removable: '0' rotational: '0' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '1562824368' sectorsize: '512' size: 745.21 GB support_discard: '512' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'sda', 'value': {'virtual': 1, 'links': {'ids': ['scsi-36d094660593af20022eb5a6eb8838bae', 'wwn-0x6d094660593af20022eb5a6eb8838bae'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': 'DELL', 'model': 'PERC H740P Adp', 'sas_address': None, 'sas_device_handle': None, 'serial': '00ae8b83b86e5aeb2200f23a59604609', 'removable': '0', 'support_discard': '0', 'wwn': '0x6d094660593af20022eb5a6eb8838bae', 'partitions': {'sda2': {'links': {'ids': ['lvm-pv-uuid-PTcBBO-UL1Q-VhdP-0HcP-47lM-u0co-JqtHmg', 'scsi-36d094660593af20022eb5a6eb8838bae-part2', 'wwn-0x6d094660593af20022eb5a6eb8838bae-part2'], 'uuids': [], 'labels': [], 'masters': ['dm-0', 'dm-1', 'dm-2']}, 'start': '2099200', 'sectors': '934541312', 'sectorsize': 512, 'size': '445.62 GB', 'uuid': None, 'holders': ['rhel_e23--h05--740xd-swap', 'rhel_e23--h05--740xd-home', 'rhel_e23--h05--740xd-root']}, 'sda1': {'links': {'ids': ['scsi-36d094660593af20022eb5a6eb8838bae-part1', 'wwn-0x6d094660593af20022eb5a6eb8838bae-part1'], 'uuids': ['e86c903a-fea6-481e-92fe-076925e24cd2'], 'labels': [], 'masters': []}, 'start': '2048', 'sectors': '2097152', 'sectorsize': 512, 'size': '1.00 GB', 'uuid': 'e86c903a-fea6-481e-92fe-076925e24cd2', 'holders': []}}, 'rotational': '1', 'scheduler_mode': 'mq-deadline', 'sectors': '936640512', 'sectorsize': '512', 'size': '446.62 GB', 'host': 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)', 'holders': []}}) => changed=false item: key: sda value: holders: [] host: 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)' links: ids: - scsi-36d094660593af20022eb5a6eb8838bae - wwn-0x6d094660593af20022eb5a6eb8838bae labels: [] masters: [] uuids: [] model: PERC H740P Adp partitions: sda1: holders: [] links: ids: - scsi-36d094660593af20022eb5a6eb8838bae-part1 - wwn-0x6d094660593af20022eb5a6eb8838bae-part1 labels: [] masters: [] uuids: - e86c903a-fea6-481e-92fe-076925e24cd2 sectors: '2097152' sectorsize: 512 size: 1.00 GB start: '2048' uuid: e86c903a-fea6-481e-92fe-076925e24cd2 sda2: holders: - rhel_e23--h05--740xd-swap - rhel_e23--h05--740xd-home - rhel_e23--h05--740xd-root links: ids: - lvm-pv-uuid-PTcBBO-UL1Q-VhdP-0HcP-47lM-u0co-JqtHmg - scsi-36d094660593af20022eb5a6eb8838bae-part2 - wwn-0x6d094660593af20022eb5a6eb8838bae-part2 labels: [] masters: - dm-0 - dm-1 - dm-2 uuids: [] sectors: '934541312' sectorsize: 512 size: 445.62 GB start: '2099200' uuid: null removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '936640512' sectorsize: '512' serial: 00ae8b83b86e5aeb2200f23a59604609 size: 446.62 GB support_discard: '0' vendor: DELL virtual: 1 wwn: '0x6d094660593af20022eb5a6eb8838bae' skip_reason: Conditional result was False 
skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'nvme3n1', 'value': {'virtual': 1, 'links': {'ids': ['nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200272', 'nvme-eui.333959304b2002720025385800000002'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': None, 'model': 'Dell Express Flash PM1725a 800GB SFF', 'sas_address': None, 'sas_device_handle': None, 'removable': '0', 'support_discard': '512', 'partitions': {}, 'rotational': '0', 'scheduler_mode': 'none', 'sectors': '1562824368', 'sectorsize': '512', 'size': '745.21 GB', 'host': 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)', 'holders': []}}) => changed=false item: key: nvme3n1 value: holders: [] host: 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)' links: ids: - nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200272 - nvme-eui.333959304b2002720025385800000002 labels: [] masters: [] uuids: [] model: Dell Express Flash PM1725a 800GB SFF partitions: {} removable: '0' rotational: '0' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '1562824368' sectorsize: '512' size: 745.21 GB support_discard: '512' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'nvme4n1', 'value': {'virtual': 1, 'links': {'ids': ['nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200507', 'nvme-eui.333959304b2005070025385800000002'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': None, 'model': 'Dell Express Flash PM1725a 800GB SFF', 'sas_address': None, 'sas_device_handle': None, 'removable': '0', 'support_discard': '512', 'partitions': {}, 'rotational': '0', 'scheduler_mode': 'none', 'sectors': '1562824368', 'sectorsize': '512', 'size': '745.21 GB', 'host': 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)', 'holders': []}}) => changed=false item: key: nvme4n1 value: holders: [] host: 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)' links: ids: - nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200507 - nvme-eui.333959304b2005070025385800000002 labels: [] masters: [] uuids: [] model: Dell Express Flash PM1725a 800GB SFF partitions: {} removable: '0' rotational: '0' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '1562824368' sectorsize: '512' size: 745.21 GB support_discard: '512' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'nvme2n1', 'value': {'virtual': 1, 'links': {'ids': ['nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200508', 'nvme-eui.333959304b2005080025385800000002'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': None, 'model': 'Dell Express Flash PM1725a 800GB SFF', 'sas_address': None, 'sas_device_handle': None, 'removable': '0', 'support_discard': '512', 'partitions': {}, 'rotational': '0', 'scheduler_mode': 'none', 'sectors': '1562824368', 'sectorsize': '512', 'size': '745.21 GB', 'host': 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)', 'holders': []}}) => changed=false item: key: nvme2n1 value: holders: [] host: 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)' links: ids: - nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200508 - 
nvme-eui.333959304b2005080025385800000002 labels: [] masters: [] uuids: [] model: Dell Express Flash PM1725a 800GB SFF partitions: {} removable: '0' rotational: '0' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '1562824368' sectorsize: '512' size: 745.21 GB support_discard: '512' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'dm-2', 'value': {'virtual': 1, 'links': {'ids': ['dm-name-rhel_e24--h05--740xd-home', 'dm-uuid-LVM-K0UgaB7rYJjfeF0StounUfKCI3U2RGcTCzL5hoLlhIMI3q9fXNeAvpXXToifhb9e'], 'uuids': ['f7e5a509-aeb2-4063-8bf7-ecfd7f6b16ca'], 'labels': [], 'masters': []}, 'vendor': None, 'model': None, 'sas_address': None, 'sas_device_handle': None, 'serial': '00a102440ca75eeb2200df3a59604609', 'removable': '0', 'support_discard': '0', 'partitions': {}, 'rotational': '1', 'scheduler_mode': '', 'sectors': '821288960', 'sectorsize': '512', 'size': '391.62 GB', 'host': '', 'holders': []}}) => changed=false item: key: dm-2 value: holders: [] host: '' links: ids: - dm-name-rhel_e24--h05--740xd-home - dm-uuid-LVM-K0UgaB7rYJjfeF0StounUfKCI3U2RGcTCzL5hoLlhIMI3q9fXNeAvpXXToifhb9e labels: [] masters: [] uuids: - f7e5a509-aeb2-4063-8bf7-ecfd7f6b16ca model: null partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: '' sectors: '821288960' sectorsize: '512' serial: 00a102440ca75eeb2200df3a59604609 size: 391.62 GB support_discard: '0' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'dm-0', 'value': {'virtual': 1, 'links': {'ids': ['dm-name-rhel_e24--h05--740xd-root', 'dm-uuid-LVM-K0UgaB7rYJjfeF0StounUfKCI3U2RGcT7LGFd6Nqa90j4sWKXPJJMbw35TY39I0d'], 'uuids': ['c5b5e389-0999-493f-b155-e151c31dcb8a'], 'labels': [], 'masters': []}, 'vendor': None, 'model': None, 'sas_address': None, 'sas_device_handle': None, 'serial': '00a102440ca75eeb2200df3a59604609', 'removable': '0', 'support_discard': '0', 'partitions': {}, 'rotational': '1', 'scheduler_mode': '', 'sectors': '104857600', 'sectorsize': '512', 'size': '50.00 GB', 'host': '', 'holders': []}}) => changed=false item: key: dm-0 value: holders: [] host: '' links: ids: - dm-name-rhel_e24--h05--740xd-root - dm-uuid-LVM-K0UgaB7rYJjfeF0StounUfKCI3U2RGcT7LGFd6Nqa90j4sWKXPJJMbw35TY39I0d labels: [] masters: [] uuids: - c5b5e389-0999-493f-b155-e151c31dcb8a model: null partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: '' sectors: '104857600' sectorsize: '512' serial: 00a102440ca75eeb2200df3a59604609 size: 50.00 GB support_discard: '0' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'nvme1n1', 'value': {'virtual': 1, 'links': {'ids': ['nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200184', 'nvme-eui.333959304b2001840025385800000002'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': None, 'model': 'Dell Express Flash PM1725a 800GB SFF', 'sas_address': None, 'sas_device_handle': None, 'removable': '0', 'support_discard': '512', 'partitions': {}, 'rotational': '0', 'scheduler_mode': 'none', 'sectors': '1562824368', 'sectorsize': '512', 'size': '745.21 GB', 'host': 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)', 'holders': []}}) => changed=false item: key: nvme1n1 value: holders: [] host: 'Non-Volatile memory 
controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)' links: ids: - nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200184 - nvme-eui.333959304b2001840025385800000002 labels: [] masters: [] uuids: [] model: Dell Express Flash PM1725a 800GB SFF partitions: {} removable: '0' rotational: '0' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '1562824368' sectorsize: '512' size: 745.21 GB support_discard: '512' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'dm-1', 'value': {'virtual': 1, 'links': {'ids': ['dm-name-rhel_e24--h07--740xd-swap', 'dm-uuid-LVM-442cDUgf2SsZ1kDybMX9YLoVrgtZeIClrAmzXC17xxe0ZtRbVDkjGCvjSWeoaI87'], 'uuids': ['5b641115-cd74-4ef7-9fcc-f8d097d98265'], 'labels': [], 'masters': []}, 'vendor': None, 'model': None, 'sas_address': None, 'sas_device_handle': None, 'serial': '0062e83904a95eeb2200d5765b604609', 'removable': '0', 'support_discard': '0', 'partitions': {}, 'rotational': '1', 'scheduler_mode': '', 'sectors': '8388608', 'sectorsize': '512', 'size': '4.00 GB', 'host': '', 'holders': []}}) => changed=false item: key: dm-1 value: holders: [] host: '' links: ids: - dm-name-rhel_e24--h07--740xd-swap - dm-uuid-LVM-442cDUgf2SsZ1kDybMX9YLoVrgtZeIClrAmzXC17xxe0ZtRbVDkjGCvjSWeoaI87 labels: [] masters: [] uuids: - 5b641115-cd74-4ef7-9fcc-f8d097d98265 model: null partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: '' sectors: '8388608' sectorsize: '512' serial: 0062e83904a95eeb2200d5765b604609 size: 4.00 GB support_discard: '0' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'sda', 'value': {'virtual': 1, 'links': {'ids': ['scsi-36d094660593adf0022eb5ea70c4402a1', 'wwn-0x6d094660593adf0022eb5ea70c4402a1'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': 'DELL', 'model': 'PERC H740P Adp', 'sas_address': None, 'sas_device_handle': None, 'serial': '00a102440ca75eeb2200df3a59604609', 'removable': '0', 'support_discard': '0', 'wwn': '0x6d094660593adf0022eb5ea70c4402a1', 'partitions': {'sda2': {'links': {'ids': ['lvm-pv-uuid-1AfcRD-xdYt-QtJX-x7wd-MLCO-jTLY-pmebhq', 'scsi-36d094660593adf0022eb5ea70c4402a1-part2', 'wwn-0x6d094660593adf0022eb5ea70c4402a1-part2'], 'uuids': [], 'labels': [], 'masters': ['dm-0', 'dm-1', 'dm-2']}, 'start': '2099200', 'sectors': '934541312', 'sectorsize': 512, 'size': '445.62 GB', 'uuid': None, 'holders': ['rhel_e24--h05--740xd-swap', 'rhel_e24--h05--740xd-home', 'rhel_e24--h05--740xd-root']}, 'sda1': {'links': {'ids': ['scsi-36d094660593adf0022eb5ea70c4402a1-part1', 'wwn-0x6d094660593adf0022eb5ea70c4402a1-part1'], 'uuids': ['d1e690f1-6f87-41f5-b6a6-5b7d85ad1a65'], 'labels': [], 'masters': []}, 'start': '2048', 'sectors': '2097152', 'sectorsize': 512, 'size': '1.00 GB', 'uuid': 'd1e690f1-6f87-41f5-b6a6-5b7d85ad1a65', 'holders': []}}, 'rotational': '1', 'scheduler_mode': 'mq-deadline', 'sectors': '936640512', 'sectorsize': '512', 'size': '446.62 GB', 'host': 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)', 'holders': []}}) => changed=false item: key: sda value: holders: [] host: 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)' links: ids: - scsi-36d094660593adf0022eb5ea70c4402a1 - wwn-0x6d094660593adf0022eb5ea70c4402a1 labels: [] masters: [] uuids: [] model: PERC H740P Adp partitions: sda1: holders: [] 
links: ids: - scsi-36d094660593adf0022eb5ea70c4402a1-part1 - wwn-0x6d094660593adf0022eb5ea70c4402a1-part1 labels: [] masters: [] uuids: - d1e690f1-6f87-41f5-b6a6-5b7d85ad1a65 sectors: '2097152' sectorsize: 512 size: 1.00 GB start: '2048' uuid: d1e690f1-6f87-41f5-b6a6-5b7d85ad1a65 sda2: holders: - rhel_e24--h05--740xd-swap - rhel_e24--h05--740xd-home - rhel_e24--h05--740xd-root links: ids: - lvm-pv-uuid-1AfcRD-xdYt-QtJX-x7wd-MLCO-jTLY-pmebhq - scsi-36d094660593adf0022eb5ea70c4402a1-part2 - wwn-0x6d094660593adf0022eb5ea70c4402a1-part2 labels: [] masters: - dm-0 - dm-1 - dm-2 uuids: [] sectors: '934541312' sectorsize: 512 size: 445.62 GB start: '2099200' uuid: null removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '936640512' sectorsize: '512' serial: 00a102440ca75eeb2200df3a59604609 size: 446.62 GB support_discard: '0' vendor: DELL virtual: 1 wwn: '0x6d094660593adf0022eb5ea70c4402a1' skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'nvme0n1', 'value': {'virtual': 1, 'links': {'ids': ['nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200496', 'nvme-eui.333959304b2004960025385800000002'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': None, 'model': 'Dell Express Flash PM1725a 800GB SFF', 'sas_address': None, 'sas_device_handle': None, 'removable': '0', 'support_discard': '512', 'partitions': {}, 'rotational': '0', 'scheduler_mode': 'none', 'sectors': '1562824368', 'sectorsize': '512', 'size': '745.21 GB', 'host': 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)', 'holders': []}}) => changed=false item: key: nvme0n1 value: holders: [] host: 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)' links: ids: - nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200496 - nvme-eui.333959304b2004960025385800000002 labels: [] masters: [] uuids: [] model: Dell Express Flash PM1725a 800GB SFF partitions: {} removable: '0' rotational: '0' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '1562824368' sectorsize: '512' size: 745.21 GB support_discard: '512' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'nvme4n1', 'value': {'virtual': 1, 'links': {'ids': ['nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200499', 'nvme-eui.333959304b2004990025385800000002'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': None, 'model': 'Dell Express Flash PM1725a 800GB SFF', 'sas_address': None, 'sas_device_handle': None, 'removable': '0', 'support_discard': '512', 'partitions': {}, 'rotational': '0', 'scheduler_mode': 'none', 'sectors': '1562824368', 'sectorsize': '512', 'size': '745.21 GB', 'host': 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)', 'holders': []}}) => changed=false item: key: nvme4n1 value: holders: [] host: 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)' links: ids: - nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200499 - nvme-eui.333959304b2004990025385800000002 labels: [] masters: [] uuids: [] model: Dell Express Flash PM1725a 800GB SFF partitions: {} removable: '0' rotational: '0' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '1562824368' sectorsize: '512' size: 745.21 GB support_discard: '512' vendor: null virtual: 
1 skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'nvme3n1', 'value': {'virtual': 1, 'links': {'ids': ['nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200276', 'nvme-eui.333959304b2002760025385800000002'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': None, 'model': 'Dell Express Flash PM1725a 800GB SFF', 'sas_address': None, 'sas_device_handle': None, 'removable': '0', 'support_discard': '512', 'partitions': {}, 'rotational': '0', 'scheduler_mode': 'none', 'sectors': '1562824368', 'sectorsize': '512', 'size': '745.21 GB', 'host': 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)', 'holders': []}}) => changed=false item: key: nvme3n1 value: holders: [] host: 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)' links: ids: - nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200276 - nvme-eui.333959304b2002760025385800000002 labels: [] masters: [] uuids: [] model: Dell Express Flash PM1725a 800GB SFF partitions: {} removable: '0' rotational: '0' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '1562824368' sectorsize: '512' size: 745.21 GB support_discard: '512' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'nvme2n1', 'value': {'virtual': 1, 'links': {'ids': ['nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200365', 'nvme-eui.333959304b2003650025385800000002'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': None, 'model': 'Dell Express Flash PM1725a 800GB SFF', 'sas_address': None, 'sas_device_handle': None, 'removable': '0', 'support_discard': '512', 'partitions': {}, 'rotational': '0', 'scheduler_mode': 'none', 'sectors': '1562824368', 'sectorsize': '512', 'size': '745.21 GB', 'host': 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)', 'holders': []}}) => changed=false item: key: nvme2n1 value: holders: [] host: 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)' links: ids: - nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200365 - nvme-eui.333959304b2003650025385800000002 labels: [] masters: [] uuids: [] model: Dell Express Flash PM1725a 800GB SFF partitions: {} removable: '0' rotational: '0' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '1562824368' sectorsize: '512' size: 745.21 GB support_discard: '512' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'dm-2', 'value': {'virtual': 1, 'links': {'ids': ['dm-name-rhel_e24--h07--740xd-home', 'dm-uuid-LVM-442cDUgf2SsZ1kDybMX9YLoVrgtZeIClYRW4ftQfpYhgZonGGkw46yFCmFDgGBAz'], 'uuids': ['51094b30-1ac0-45b3-bc23-7c0889e38701'], 'labels': [], 'masters': []}, 'vendor': None, 'model': None, 'sas_address': None, 'sas_device_handle': None, 'serial': '0062e83904a95eeb2200d5765b604609', 'removable': '0', 'support_discard': '0', 'partitions': {}, 'rotational': '1', 'scheduler_mode': '', 'sectors': '821288960', 'sectorsize': '512', 'size': '391.62 GB', 'host': '', 'holders': []}}) => changed=false item: key: dm-2 value: holders: [] host: '' links: ids: - dm-name-rhel_e24--h07--740xd-home - dm-uuid-LVM-442cDUgf2SsZ1kDybMX9YLoVrgtZeIClYRW4ftQfpYhgZonGGkw46yFCmFDgGBAz labels: [] masters: [] uuids: - 51094b30-1ac0-45b3-bc23-7c0889e38701 model: 
null partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: '' sectors: '821288960' sectorsize: '512' serial: 0062e83904a95eeb2200d5765b604609 size: 391.62 GB support_discard: '0' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'dm-0', 'value': {'virtual': 1, 'links': {'ids': ['dm-name-rhel_e24--h07--740xd-root', 'dm-uuid-LVM-442cDUgf2SsZ1kDybMX9YLoVrgtZeICl12R31efJRMutewOtj2XjIaCCg8yJIKJz'], 'uuids': ['2635fa18-fa94-4a67-9962-593817cee965'], 'labels': [], 'masters': []}, 'vendor': None, 'model': None, 'sas_address': None, 'sas_device_handle': None, 'serial': '0062e83904a95eeb2200d5765b604609', 'removable': '0', 'support_discard': '0', 'partitions': {}, 'rotational': '1', 'scheduler_mode': '', 'sectors': '104857600', 'sectorsize': '512', 'size': '50.00 GB', 'host': '', 'holders': []}}) => changed=false item: key: dm-0 value: holders: [] host: '' links: ids: - dm-name-rhel_e24--h07--740xd-root - dm-uuid-LVM-442cDUgf2SsZ1kDybMX9YLoVrgtZeICl12R31efJRMutewOtj2XjIaCCg8yJIKJz labels: [] masters: [] uuids: - 2635fa18-fa94-4a67-9962-593817cee965 model: null partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: '' sectors: '104857600' sectorsize: '512' serial: 0062e83904a95eeb2200d5765b604609 size: 50.00 GB support_discard: '0' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'nvme1n1', 'value': {'virtual': 1, 'links': {'ids': ['nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200278', 'nvme-eui.333959304b2002780025385800000002'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': None, 'model': 'Dell Express Flash PM1725a 800GB SFF', 'sas_address': None, 'sas_device_handle': None, 'removable': '0', 'support_discard': '512', 'partitions': {}, 'rotational': '0', 'scheduler_mode': 'none', 'sectors': '1562824368', 'sectorsize': '512', 'size': '745.21 GB', 'host': 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)', 'holders': []}}) => changed=false item: key: nvme1n1 value: holders: [] host: 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)' links: ids: - nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200278 - nvme-eui.333959304b2002780025385800000002 labels: [] masters: [] uuids: [] model: Dell Express Flash PM1725a 800GB SFF partitions: {} removable: '0' rotational: '0' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '1562824368' sectorsize: '512' size: 745.21 GB support_discard: '512' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'sda', 'value': {'virtual': 1, 'links': {'ids': ['scsi-36d0946605b76d50022eb5ea90439e862', 'wwn-0x6d0946605b76d50022eb5ea90439e862'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': 'DELL', 'model': 'PERC H740P Adp', 'sas_address': None, 'sas_device_handle': None, 'serial': '0062e83904a95eeb2200d5765b604609', 'removable': '0', 'support_discard': '0', 'wwn': '0x6d0946605b76d50022eb5ea90439e862', 'partitions': {'sda2': {'links': {'ids': ['lvm-pv-uuid-1ediqi-Tc5a-2oA0-ivfh-2v0F-vCWC-dIrYBq', 'scsi-36d0946605b76d50022eb5ea90439e862-part2', 'wwn-0x6d0946605b76d50022eb5ea90439e862-part2'], 'uuids': [], 'labels': [], 'masters': ['dm-0', 'dm-1', 'dm-2']}, 'start': '2099200', 
'sectors': '934541312', 'sectorsize': 512, 'size': '445.62 GB', 'uuid': None, 'holders': ['rhel_e24--h07--740xd-swap', 'rhel_e24--h07--740xd-home', 'rhel_e24--h07--740xd-root']}, 'sda1': {'links': {'ids': ['scsi-36d0946605b76d50022eb5ea90439e862-part1', 'wwn-0x6d0946605b76d50022eb5ea90439e862-part1'], 'uuids': ['2caa87d9-91ef-4c5f-9a61-376be182a2e2'], 'labels': [], 'masters': []}, 'start': '2048', 'sectors': '2097152', 'sectorsize': 512, 'size': '1.00 GB', 'uuid': '2caa87d9-91ef-4c5f-9a61-376be182a2e2', 'holders': []}}, 'rotational': '1', 'scheduler_mode': 'mq-deadline', 'sectors': '936640512', 'sectorsize': '512', 'size': '446.62 GB', 'host': 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)', 'holders': []}}) => changed=false item: key: sda value: holders: [] host: 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)' links: ids: - scsi-36d0946605b76d50022eb5ea90439e862 - wwn-0x6d0946605b76d50022eb5ea90439e862 labels: [] masters: [] uuids: [] model: PERC H740P Adp partitions: sda1: holders: [] links: ids: - scsi-36d0946605b76d50022eb5ea90439e862-part1 - wwn-0x6d0946605b76d50022eb5ea90439e862-part1 labels: [] masters: [] uuids: - 2caa87d9-91ef-4c5f-9a61-376be182a2e2 sectors: '2097152' sectorsize: 512 size: 1.00 GB start: '2048' uuid: 2caa87d9-91ef-4c5f-9a61-376be182a2e2 sda2: holders: - rhel_e24--h07--740xd-swap - rhel_e24--h07--740xd-home - rhel_e24--h07--740xd-root links: ids: - lvm-pv-uuid-1ediqi-Tc5a-2oA0-ivfh-2v0F-vCWC-dIrYBq - scsi-36d0946605b76d50022eb5ea90439e862-part2 - wwn-0x6d0946605b76d50022eb5ea90439e862-part2 labels: [] masters: - dm-0 - dm-1 - dm-2 uuids: [] sectors: '934541312' sectorsize: 512 size: 445.62 GB start: '2099200' uuid: null removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '936640512' sectorsize: '512' serial: 0062e83904a95eeb2200d5765b604609 size: 446.62 GB support_discard: '0' vendor: DELL virtual: 1 wwn: '0x6d0946605b76d50022eb5ea90439e862' skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'nvme4n1', 'value': {'virtual': 1, 'links': {'ids': ['nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200366', 'nvme-eui.333959304b2003660025385800000002'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': None, 'model': 'Dell Express Flash PM1725a 800GB SFF', 'sas_address': None, 'sas_device_handle': None, 'removable': '0', 'support_discard': '512', 'partitions': {}, 'rotational': '0', 'scheduler_mode': 'none', 'sectors': '1562824368', 'sectorsize': '512', 'size': '745.21 GB', 'host': 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)', 'holders': []}}) => changed=false item: key: nvme4n1 value: holders: [] host: 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)' links: ids: - nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200366 - nvme-eui.333959304b2003660025385800000002 labels: [] masters: [] uuids: [] model: Dell Express Flash PM1725a 800GB SFF partitions: {} removable: '0' rotational: '0' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '1562824368' sectorsize: '512' size: 745.21 GB support_discard: '512' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'dm-1', 'value': {'virtual': 1, 'links': {'ids': ['dm-name-rhel_e23--h07--740xd-swap', 
'dm-uuid-LVM-3yaE0F4d0F9qdNjMMWBtY2LMI24hF8gRdjpmiBngf9SYx0OWdmismG84cF1vC1Ks'], 'uuids': ['6db506e9-a725-467e-b0bf-704339ade05f'], 'labels': [], 'masters': []}, 'vendor': None, 'model': None, 'sas_address': None, 'sas_device_handle': None, 'serial': '0018b87c0ba85eeb2200c53a59604609', 'removable': '0', 'support_discard': '0', 'partitions': {}, 'rotational': '1', 'scheduler_mode': '', 'sectors': '8388608', 'sectorsize': '512', 'size': '4.00 GB', 'host': '', 'holders': []}}) => changed=false item: key: dm-1 value: holders: [] host: '' links: ids: - dm-name-rhel_e23--h07--740xd-swap - dm-uuid-LVM-3yaE0F4d0F9qdNjMMWBtY2LMI24hF8gRdjpmiBngf9SYx0OWdmismG84cF1vC1Ks labels: [] masters: [] uuids: - 6db506e9-a725-467e-b0bf-704339ade05f model: null partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: '' sectors: '8388608' sectorsize: '512' serial: 0018b87c0ba85eeb2200c53a59604609 size: 4.00 GB support_discard: '0' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'nvme0n1', 'value': {'virtual': 1, 'links': {'ids': ['nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200367', 'nvme-eui.333959304b2003670025385800000002'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': None, 'model': 'Dell Express Flash PM1725a 800GB SFF', 'sas_address': None, 'sas_device_handle': None, 'removable': '0', 'support_discard': '512', 'partitions': {}, 'rotational': '0', 'scheduler_mode': 'none', 'sectors': '1562824368', 'sectorsize': '512', 'size': '745.21 GB', 'host': 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)', 'holders': []}}) => changed=false item: key: nvme0n1 value: holders: [] host: 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)' links: ids: - nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200367 - nvme-eui.333959304b2003670025385800000002 labels: [] masters: [] uuids: [] model: Dell Express Flash PM1725a 800GB SFF partitions: {} removable: '0' rotational: '0' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '1562824368' sectorsize: '512' size: 745.21 GB support_discard: '512' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'nvme3n1', 'value': {'virtual': 1, 'links': {'ids': ['nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200245', 'nvme-eui.333959304b2002450025385800000002'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': None, 'model': 'Dell Express Flash PM1725a 800GB SFF', 'sas_address': None, 'sas_device_handle': None, 'removable': '0', 'support_discard': '512', 'partitions': {}, 'rotational': '0', 'scheduler_mode': 'none', 'sectors': '1562824368', 'sectorsize': '512', 'size': '745.21 GB', 'host': 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)', 'holders': []}}) => changed=false item: key: nvme3n1 value: holders: [] host: 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)' links: ids: - nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200245 - nvme-eui.333959304b2002450025385800000002 labels: [] masters: [] uuids: [] model: Dell Express Flash PM1725a 800GB SFF partitions: {} removable: '0' rotational: '0' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '1562824368' sectorsize: '512' size: 745.21 GB 
support_discard: '512' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'nvme2n1', 'value': {'virtual': 1, 'links': {'ids': ['nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200275', 'nvme-eui.333959304b2002750025385800000002'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': None, 'model': 'Dell Express Flash PM1725a 800GB SFF', 'sas_address': None, 'sas_device_handle': None, 'removable': '0', 'support_discard': '512', 'partitions': {}, 'rotational': '0', 'scheduler_mode': 'none', 'sectors': '1562824368', 'sectorsize': '512', 'size': '745.21 GB', 'host': 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)', 'holders': []}}) => changed=false item: key: nvme2n1 value: holders: [] host: 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)' links: ids: - nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200275 - nvme-eui.333959304b2002750025385800000002 labels: [] masters: [] uuids: [] model: Dell Express Flash PM1725a 800GB SFF partitions: {} removable: '0' rotational: '0' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '1562824368' sectorsize: '512' size: 745.21 GB support_discard: '512' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'dm-2', 'value': {'virtual': 1, 'links': {'ids': ['dm-name-rhel_e23--h07--740xd-home', 'dm-uuid-LVM-3yaE0F4d0F9qdNjMMWBtY2LMI24hF8gRr9FJKliDez5cqadzztq4A514O1iXZax5'], 'uuids': ['fc0453c8-713d-4c27-a5d3-653d138ab5b6'], 'labels': [], 'masters': []}, 'vendor': None, 'model': None, 'sas_address': None, 'sas_device_handle': None, 'serial': '0018b87c0ba85eeb2200c53a59604609', 'removable': '0', 'support_discard': '0', 'partitions': {}, 'rotational': '1', 'scheduler_mode': '', 'sectors': '821288960', 'sectorsize': '512', 'size': '391.62 GB', 'host': '', 'holders': []}}) => changed=false item: key: dm-2 value: holders: [] host: '' links: ids: - dm-name-rhel_e23--h07--740xd-home - dm-uuid-LVM-3yaE0F4d0F9qdNjMMWBtY2LMI24hF8gRr9FJKliDez5cqadzztq4A514O1iXZax5 labels: [] masters: [] uuids: - fc0453c8-713d-4c27-a5d3-653d138ab5b6 model: null partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: '' sectors: '821288960' sectorsize: '512' serial: 0018b87c0ba85eeb2200c53a59604609 size: 391.62 GB support_discard: '0' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'dm-0', 'value': {'virtual': 1, 'links': {'ids': ['dm-name-rhel_e23--h07--740xd-root', 'dm-uuid-LVM-3yaE0F4d0F9qdNjMMWBtY2LMI24hF8gRwsP4gO968B2Xg2efVa2YFsjoaJFX2Is0'], 'uuids': ['b5da91df-d190-4b3f-8d21-696e4036d8a8'], 'labels': [], 'masters': []}, 'vendor': None, 'model': None, 'sas_address': None, 'sas_device_handle': None, 'serial': '0018b87c0ba85eeb2200c53a59604609', 'removable': '0', 'support_discard': '0', 'partitions': {}, 'rotational': '1', 'scheduler_mode': '', 'sectors': '104857600', 'sectorsize': '512', 'size': '50.00 GB', 'host': '', 'holders': []}}) => changed=false item: key: dm-0 value: holders: [] host: '' links: ids: - dm-name-rhel_e23--h07--740xd-root - dm-uuid-LVM-3yaE0F4d0F9qdNjMMWBtY2LMI24hF8gRwsP4gO968B2Xg2efVa2YFsjoaJFX2Is0 labels: [] masters: [] uuids: - b5da91df-d190-4b3f-8d21-696e4036d8a8 model: null partitions: {} removable: '0' rotational: '1' 
sas_address: null sas_device_handle: null scheduler_mode: '' sectors: '104857600' sectorsize: '512' serial: 0018b87c0ba85eeb2200c53a59604609 size: 50.00 GB support_discard: '0' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'nvme1n1', 'value': {'virtual': 1, 'links': {'ids': ['nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200372', 'nvme-eui.333959304b2003720025385800000002'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': None, 'model': 'Dell Express Flash PM1725a 800GB SFF', 'sas_address': None, 'sas_device_handle': None, 'removable': '0', 'support_discard': '512', 'partitions': {}, 'rotational': '0', 'scheduler_mode': 'none', 'sectors': '1562824368', 'sectorsize': '512', 'size': '745.21 GB', 'host': 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)', 'holders': []}}) => changed=false item: key: nvme1n1 value: holders: [] host: 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)' links: ids: - nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200372 - nvme-eui.333959304b2003720025385800000002 labels: [] masters: [] uuids: [] model: Dell Express Flash PM1725a 800GB SFF partitions: {} removable: '0' rotational: '0' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '1562824368' sectorsize: '512' size: 745.21 GB support_discard: '512' vendor: null virtual: 1 skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'sda', 'value': {'virtual': 1, 'links': {'ids': ['scsi-36d094660593ac50022eb5ea80b7cb818', 'wwn-0x6d094660593ac50022eb5ea80b7cb818'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': 'DELL', 'model': 'PERC H740P Adp', 'sas_address': None, 'sas_device_handle': None, 'serial': '0018b87c0ba85eeb2200c53a59604609', 'removable': '0', 'support_discard': '0', 'wwn': '0x6d094660593ac50022eb5ea80b7cb818', 'partitions': {'sda2': {'links': {'ids': ['lvm-pv-uuid-t9kfnk-Wanh-hRO0-o0BW-pyfK-o3kJ-C0TwO1', 'scsi-36d094660593ac50022eb5ea80b7cb818-part2', 'wwn-0x6d094660593ac50022eb5ea80b7cb818-part2'], 'uuids': [], 'labels': [], 'masters': ['dm-0', 'dm-1', 'dm-2']}, 'start': '2099200', 'sectors': '934541312', 'sectorsize': 512, 'size': '445.62 GB', 'uuid': None, 'holders': ['rhel_e23--h07--740xd-swap', 'rhel_e23--h07--740xd-home', 'rhel_e23--h07--740xd-root']}, 'sda1': {'links': {'ids': ['scsi-36d094660593ac50022eb5ea80b7cb818-part1', 'wwn-0x6d094660593ac50022eb5ea80b7cb818-part1'], 'uuids': ['fca62056-4ead-4b46-a6ed-ddeee9dbccf8'], 'labels': [], 'masters': []}, 'start': '2048', 'sectors': '2097152', 'sectorsize': 512, 'size': '1.00 GB', 'uuid': 'fca62056-4ead-4b46-a6ed-ddeee9dbccf8', 'holders': []}}, 'rotational': '1', 'scheduler_mode': 'mq-deadline', 'sectors': '936640512', 'sectorsize': '512', 'size': '446.62 GB', 'host': 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)', 'holders': []}}) => changed=false item: key: sda value: holders: [] host: 'RAID bus controller: LSI Logic / Symbios Logic MegaRAID Tri-Mode SAS3508 (rev 01)' links: ids: - scsi-36d094660593ac50022eb5ea80b7cb818 - wwn-0x6d094660593ac50022eb5ea80b7cb818 labels: [] masters: [] uuids: [] model: PERC H740P Adp partitions: sda1: holders: [] links: ids: - scsi-36d094660593ac50022eb5ea80b7cb818-part1 - wwn-0x6d094660593ac50022eb5ea80b7cb818-part1 labels: [] masters: [] uuids: - fca62056-4ead-4b46-a6ed-ddeee9dbccf8 
sectors: '2097152' sectorsize: 512 size: 1.00 GB start: '2048' uuid: fca62056-4ead-4b46-a6ed-ddeee9dbccf8 sda2: holders: - rhel_e23--h07--740xd-swap - rhel_e23--h07--740xd-home - rhel_e23--h07--740xd-root links: ids: - lvm-pv-uuid-t9kfnk-Wanh-hRO0-o0BW-pyfK-o3kJ-C0TwO1 - scsi-36d094660593ac50022eb5ea80b7cb818-part2 - wwn-0x6d094660593ac50022eb5ea80b7cb818-part2 labels: [] masters: - dm-0 - dm-1 - dm-2 uuids: [] sectors: '934541312' sectorsize: 512 size: 445.62 GB start: '2099200' uuid: null removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '936640512' sectorsize: '512' serial: 0018b87c0ba85eeb2200c53a59604609 size: 446.62 GB support_discard: '0' vendor: DELL virtual: 1 wwn: '0x6d094660593ac50022eb5ea80b7cb818' skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'key': 'nvme4n1', 'value': {'virtual': 1, 'links': {'ids': ['nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200238', 'nvme-eui.333959304b2002380025385800000002'], 'uuids': [], 'labels': [], 'masters': []}, 'vendor': None, 'model': 'Dell Express Flash PM1725a 800GB SFF', 'sas_address': None, 'sas_device_handle': None, 'removable': '0', 'support_discard': '512', 'partitions': {}, 'rotational': '0', 'scheduler_mode': 'none', 'sectors': '1562824368', 'sectorsize': '512', 'size': '745.21 GB', 'host': 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)', 'holders': []}}) => changed=false item: key: nvme4n1 value: holders: [] host: 'Non-Volatile memory controller: Samsung Electronics Co Ltd NVMe SSD Controller 172Xa/172Xb (rev 01)' links: ids: - nvme-Dell_Express_Flash_PM1725a_800GB_SFF__S39YNX0K200238 - nvme-eui.333959304b2002380025385800000002 labels: [] masters: [] uuids: [] model: Dell Express Flash PM1725a 800GB SFF partitions: {} removable: '0' rotational: '0' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '1562824368' sectorsize: '512' size: 745.21 GB support_discard: '512' vendor: null virtual: 1 skip_reason: Conditional result was False TASK [ceph-facts : set_fact ceph_uid for debian based system - non container] ******************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:189 Wednesday 17 April 2019 11:25:48 +0000 (0:00:00.196) 0:02:14.966 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : set_fact ceph_uid for red hat or suse based system - non container] ********************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:196 Wednesday 17 April 2019 11:25:48 +0000 (0:00:00.115) 0:02:15.082 ******* ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ceph_uid: 167 ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false 
ansible_facts: ceph_uid: 167 ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ceph_uid: 167 ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ceph_uid: 167 TASK [ceph-facts : set_fact ceph_uid for debian based system - container] *********************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:203 Wednesday 17 April 2019 11:25:48 +0000 (0:00:00.126) 0:02:15.208 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : set_fact ceph_uid for red hat based system - container] ********************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:210 Wednesday 17 April 2019 11:25:48 +0000 (0:00:00.107) 0:02:15.315 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : set_fact ceph_uid for red hat] *********************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:218 Wednesday 17 April 2019 11:25:49 +0000 (0:00:00.108) 0:02:15.424 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : set_fact rgw_hostname] ******************************************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:225 Wednesday 17 April 2019 11:25:49 +0000 (0:00:00.107) 0:02:15.531 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: 
[e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : set_fact osd_pool_default_pg_num] ******************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:240 Wednesday 17 April 2019 11:25:49 +0000 (0:00:00.115) 0:02:15.647 ******* ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: osd_pool_default_pg_num: '8' ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: osd_pool_default_pg_num: '8' ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: osd_pool_default_pg_num: '8' ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: osd_pool_default_pg_num: '8' TASK [ceph-facts : set_fact osd_pool_default_size] ********************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:244 Wednesday 17 April 2019 11:25:49 +0000 (0:00:00.126) 0:02:15.773 ******* ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: osd_pool_default_size: '3' ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: osd_pool_default_size: '3' ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: osd_pool_default_size: '3' ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: osd_pool_default_size: '3' TASK [ceph-facts : set_fact osd_pool_default_min_size] ****************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:248 Wednesday 17 April 2019 11:25:49 +0000 (0:00:00.123) 0:02:15.897 ******* ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: osd_pool_default_min_size: '0' ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: osd_pool_default_min_size: '0' ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: osd_pool_default_min_size: '0' ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: osd_pool_default_min_size: '0' TASK [ceph-facts : check if the ceph conf exists] *********************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:252 Wednesday 17 April 2019 11:25:49 +0000 (0:00:00.124) 0:02:16.021 ******* ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false stat: exists: false ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false stat: exists: false ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false stat: exists: false ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false stat: exists: false TASK [ceph-facts : get default crush rule value from ceph configuration] 
************************************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:257 Wednesday 17 April 2019 11:25:49 +0000 (0:00:00.253) 0:02:16.275 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : set_fact osd_pool_default_crush_rule] **************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:264 Wednesday 17 April 2019 11:25:49 +0000 (0:00:00.112) 0:02:16.387 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : set_fact _monitor_address to monitor_address_block] ************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/set_monitor_address.yml:2 Wednesday 17 April 2019 11:25:50 +0000 (0:00:00.109) 0:02:16.497 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h17-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h17-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h19-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h19-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h21-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h21-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h17-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h17-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h19-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h19-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h21-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h21-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h17-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h17-740xd.alias.bos.scalelab.redhat.com 
skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h19-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h19-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h21-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h21-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h17-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h17-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h19-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h19-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h21-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h21-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False TASK [ceph-facts : set_fact _monitor_address to monitor_address] ******************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/set_monitor_address.yml:12 Wednesday 17 April 2019 11:25:50 +0000 (0:00:00.131) 0:02:16.628 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h17-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h17-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h19-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h19-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h21-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h21-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h17-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h17-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h19-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h19-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h21-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h21-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h17-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h17-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h19-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h19-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h21-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h21-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: 
[e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h17-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h17-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h19-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h19-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h21-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h21-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False TASK [ceph-facts : set_fact _monitor_address to monitor_interface - ipv4] *********************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/set_monitor_address.yml:22 Wednesday 17 April 2019 11:25:50 +0000 (0:00:00.132) 0:02:16.761 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h17-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h17-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h19-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h19-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h21-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h21-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h17-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h17-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h19-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h19-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h21-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h21-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h17-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h17-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h19-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h19-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h21-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h21-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h17-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h17-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h19-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h19-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => 
(item=e24-h21-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h21-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False TASK [ceph-facts : set_fact _monitor_address to monitor_interface - ipv6] *********************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/set_monitor_address.yml:34 Wednesday 17 April 2019 11:25:50 +0000 (0:00:00.136) 0:02:16.898 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h17-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h17-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h19-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h19-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h21-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h21-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h17-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h17-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h19-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h19-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h21-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h21-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h17-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h17-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h19-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h19-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h21-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h21-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h17-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h17-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h19-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h19-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item=e24-h21-740xd.alias.bos.scalelab.redhat.com) => changed=false item: e24-h21-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False TASK [ceph-facts : set_fact _current_monitor_address] ******************************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/set_monitor_address.yml:46 Wednesday 17 
April 2019 11:25:50 +0000 (0:00:00.131) 0:02:17.029 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'e24-h17-740xd.alias.bos.scalelab.redhat.com', 'addr': '10.1.24.17'}) => changed=false item: addr: 10.1.24.17 name: e24-h17-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'e24-h19-740xd.alias.bos.scalelab.redhat.com', 'addr': '10.1.24.19'}) => changed=false item: addr: 10.1.24.19 name: e24-h19-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'e24-h21-740xd.alias.bos.scalelab.redhat.com', 'addr': '10.1.24.21'}) => changed=false item: addr: 10.1.24.21 name: e24-h21-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'e24-h17-740xd.alias.bos.scalelab.redhat.com', 'addr': '10.1.24.17'}) => changed=false item: addr: 10.1.24.17 name: e24-h17-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'e24-h19-740xd.alias.bos.scalelab.redhat.com', 'addr': '10.1.24.19'}) => changed=false item: addr: 10.1.24.19 name: e24-h19-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'e24-h21-740xd.alias.bos.scalelab.redhat.com', 'addr': '10.1.24.21'}) => changed=false item: addr: 10.1.24.21 name: e24-h21-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'e24-h17-740xd.alias.bos.scalelab.redhat.com', 'addr': '10.1.24.17'}) => changed=false item: addr: 10.1.24.17 name: e24-h17-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'e24-h19-740xd.alias.bos.scalelab.redhat.com', 'addr': '10.1.24.19'}) => changed=false item: addr: 10.1.24.19 name: e24-h19-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'e24-h21-740xd.alias.bos.scalelab.redhat.com', 'addr': '10.1.24.21'}) => changed=false item: addr: 10.1.24.21 name: e24-h21-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'e24-h17-740xd.alias.bos.scalelab.redhat.com', 'addr': '10.1.24.17'}) => changed=false item: addr: 10.1.24.17 name: e24-h17-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'e24-h19-740xd.alias.bos.scalelab.redhat.com', 'addr': '10.1.24.19'}) => changed=false item: addr: 10.1.24.19 name: e24-h19-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'e24-h21-740xd.alias.bos.scalelab.redhat.com', 'addr': '10.1.24.21'}) => changed=false item: addr: 10.1.24.21 name: e24-h21-740xd.alias.bos.scalelab.redhat.com skip_reason: Conditional result was False TASK [ceph-facts : set_fact _radosgw_address to radosgw_address_block] 
************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/set_radosgw_address.yml:2 Wednesday 17 April 2019 11:25:50 +0000 (0:00:00.127) 0:02:17.156 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : set_fact _radosgw_address to radosgw_address] ******************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/set_radosgw_address.yml:9 Wednesday 17 April 2019 11:25:50 +0000 (0:00:00.113) 0:02:17.269 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : set_fact _interface] ********************************************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/set_radosgw_address.yml:17 Wednesday 17 April 2019 11:25:50 +0000 (0:00:00.115) 0:02:17.385 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : set_fact _radosgw_address to radosgw_interface - ipv4] *********************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/set_radosgw_address.yml:21 Wednesday 17 April 2019 11:25:51 +0000 (0:00:00.111) 0:02:17.497 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : set_fact _radosgw_address to radosgw_interface - ipv6] 
*********************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/set_radosgw_address.yml:27 Wednesday 17 April 2019 11:25:51 +0000 (0:00:00.111) 0:02:17.608 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : set_fact rgw_instances] ****************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:277 Wednesday 17 April 2019 11:25:51 +0000 (0:00:00.110) 0:02:17.719 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item=0) => changed=false item: '0' skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item=0) => changed=false item: '0' skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item=0) => changed=false item: '0' skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item=0) => changed=false item: '0' skip_reason: Conditional result was False TASK [ceph-facts : set ntp service name for Debian family] ************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:286 Wednesday 17 April 2019 11:25:51 +0000 (0:00:00.126) 0:02:17.845 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-facts : set ntp service name for Red Hat family] ************************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-facts/tasks/facts.yml:291 Wednesday 17 April 2019 11:25:51 +0000 (0:00:00.112) 0:02:17.958 ******* ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ntp_service_name: ntpd ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ntp_service_name: ntpd ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ntp_service_name: ntpd ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ntp_service_name: ntpd TASK [ceph-handler : include check_running_containers.yml] 
************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_running_cluster.yml:2 Wednesday 17 April 2019 11:25:51 +0000 (0:00:00.171) 0:02:18.130 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : include check_socket_non_container.yml] ************************************************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_running_cluster.yml:7 Wednesday 17 April 2019 11:25:51 +0000 (0:00:00.108) 0:02:18.238 ******* included: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml for e23-h05-740xd.alias.bos.scalelab.redhat.com, e24-h05-740xd.alias.bos.scalelab.redhat.com, e24-h07-740xd.alias.bos.scalelab.redhat.com, e23-h07-740xd.alias.bos.scalelab.redhat.com TASK [ceph-handler : check for a ceph mon socket] *********************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:2 Wednesday 17 April 2019 11:25:52 +0000 (0:00:00.245) 0:02:18.484 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : check if the ceph mon socket is in-use] ************************************************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:11 Wednesday 17 April 2019 11:25:52 +0000 (0:00:00.112) 0:02:18.597 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : remove ceph mon socket if exists and not used by a process] **************************************************************************************************************************************************************************** task path: 
/usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:21 Wednesday 17 April 2019 11:25:52 +0000 (0:00:00.111) 0:02:18.708 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : check for a ceph osd socket] *********************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:30 Wednesday 17 April 2019 11:25:52 +0000 (0:00:00.111) 0:02:18.819 ******* ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false cmd: stat --printf=%n /var/run/ceph/ceph-osd*.asok delta: '0:00:00.002839' end: '2019-04-17 11:25:52.629489' failed_when_result: false rc: 0 start: '2019-04-17 11:25:52.626650' stderr: '' stderr_lines: [] stdout: /var/run/ceph/ceph-osd.3.asok stdout_lines: ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false cmd: stat --printf=%n /var/run/ceph/ceph-osd*.asok delta: '0:00:01.004552' end: '2019-04-17 11:25:53.551986' failed_when_result: false rc: 0 start: '2019-04-17 11:25:52.547434' stderr: '' stderr_lines: [] stdout: /var/run/ceph/ceph-osd.0.asok stdout_lines: ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false cmd: stat --printf=%n /var/run/ceph/ceph-osd*.asok delta: '0:00:01.004418' end: '2019-04-17 11:25:53.577894' failed_when_result: false rc: 0 start: '2019-04-17 11:25:52.573476' stderr: '' stderr_lines: [] stdout: /var/run/ceph/ceph-osd.0.asok/var/run/ceph/ceph-osd.1.asok/var/run/ceph/ceph-osd.3.asok stdout_lines: ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false cmd: stat --printf=%n /var/run/ceph/ceph-osd*.asok delta: '0:00:01.004399' end: '2019-04-17 11:25:53.601482' failed_when_result: false rc: 0 start: '2019-04-17 11:25:52.597083' stderr: '' stderr_lines: [] stdout: /var/run/ceph/ceph-osd.2.asok stdout_lines: TASK [ceph-handler : check if the ceph osd socket is in-use] ************************************************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:40 Wednesday 17 April 2019 11:25:53 +0000 (0:00:01.233) 0:02:20.052 ******* ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false cmd: - fuser - --silent - /var/run/ceph/ceph-osd.0.asok delta: '0:00:00.017325' end: '2019-04-17 11:25:53.796964' failed_when_result: false msg: non-zero return code rc: 1 start: '2019-04-17 11:25:53.779639' stderr: '' stderr_lines: [] stdout: '' stdout_lines: ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false cmd: - fuser - --silent - /var/run/ceph/ceph-osd.0.asok/var/run/ceph/ceph-osd.1.asok/var/run/ceph/ceph-osd.3.asok delta: '0:00:00.026873' end: '2019-04-17 11:25:53.831941' failed_when_result: false msg: non-zero return code rc: 1 start: '2019-04-17 11:25:53.805068' stderr: 'Cannot stat 
/var/run/ceph/ceph-osd.0.asok/var/run/ceph/ceph-osd.1.asok/var/run/ceph/ceph-osd.3.asok: Not a directory' stderr_lines: - 'Cannot stat /var/run/ceph/ceph-osd.0.asok/var/run/ceph/ceph-osd.1.asok/var/run/ceph/ceph-osd.3.asok: Not a directory' stdout: '' stdout_lines: ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false cmd: - fuser - --silent - /var/run/ceph/ceph-osd.2.asok delta: '0:00:00.018156' end: '2019-04-17 11:25:53.851420' failed_when_result: false msg: non-zero return code rc: 1 start: '2019-04-17 11:25:53.833264' stderr: '' stderr_lines: [] stdout: '' stdout_lines: ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false cmd: - fuser - --silent - /var/run/ceph/ceph-osd.3.asok delta: '0:00:00.017383' end: '2019-04-17 11:25:53.875602' failed_when_result: false msg: non-zero return code rc: 1 start: '2019-04-17 11:25:53.858219' stderr: '' stderr_lines: [] stdout: '' stdout_lines: TASK [ceph-handler : remove ceph osd socket if exists and not used by a process] **************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:50 Wednesday 17 April 2019 11:25:53 +0000 (0:00:00.274) 0:02:20.327 ******* changed: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=true path: /var/run/ceph/ceph-osd.0.asok state: absent ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false path: /var/run/ceph/ceph-osd.0.asok/var/run/ceph/ceph-osd.1.asok/var/run/ceph/ceph-osd.3.asok state: absent changed: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=true path: /var/run/ceph/ceph-osd.2.asok state: absent changed: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=true path: /var/run/ceph/ceph-osd.3.asok state: absent TASK [ceph-handler : check for a ceph mds socket] *********************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:59 Wednesday 17 April 2019 11:25:54 +0000 (0:00:00.258) 0:02:20.585 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : check if the ceph mds socket is in-use] ************************************************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:69 Wednesday 17 April 2019 11:25:54 +0000 (0:00:00.113) 0:02:20.699 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: 
[e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : remove ceph mds socket if exists and not used by a process] **************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:79 Wednesday 17 April 2019 11:25:54 +0000 (0:00:00.112) 0:02:20.811 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : check for a ceph rgw socket] *********************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:88 Wednesday 17 April 2019 11:25:54 +0000 (0:00:00.110) 0:02:20.921 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : check if the ceph rgw socket is in-use] ************************************************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:98 Wednesday 17 April 2019 11:25:54 +0000 (0:00:00.116) 0:02:21.038 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : remove ceph rgw socket if exists and not used by a process] **************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:108 Wednesday 17 April 2019 11:25:54 +0000 (0:00:00.110) 0:02:21.148 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: 
[e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : check for a ceph mgr socket] *********************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:117 Wednesday 17 April 2019 11:25:54 +0000 (0:00:00.113) 0:02:21.262 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : check if the ceph mgr socket is in-use] ************************************************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:127 Wednesday 17 April 2019 11:25:54 +0000 (0:00:00.112) 0:02:21.374 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : remove ceph mgr socket if exists and not used by a process] **************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:137 Wednesday 17 April 2019 11:25:55 +0000 (0:00:00.115) 0:02:21.490 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : check for a ceph rbd mirror socket] **************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:146 Wednesday 17 April 2019 11:25:55 +0000 (0:00:00.112) 0:02:21.602 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: 
[e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : check if the ceph rbd mirror socket is in-use] ***************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:156 Wednesday 17 April 2019 11:25:55 +0000 (0:00:00.113) 0:02:21.716 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : remove ceph rbd mirror socket if exists and not used by a process] ********************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:166 Wednesday 17 April 2019 11:25:55 +0000 (0:00:00.111) 0:02:21.828 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : check for a ceph nfs ganesha socket] *************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:175 Wednesday 17 April 2019 11:25:55 +0000 (0:00:00.111) 0:02:21.939 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : check if the ceph nfs ganesha socket is in-use] **************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:184 Wednesday 17 April 2019 11:25:55 +0000 (0:00:00.116) 0:02:22.055 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: 
[e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : remove ceph nfs ganesha socket if exists and not used by a process] ******************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:194 Wednesday 17 April 2019 11:25:55 +0000 (0:00:00.113) 0:02:22.168 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : check for a tcmu-runner] *************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:203 Wednesday 17 April 2019 11:25:55 +0000 (0:00:00.111) 0:02:22.280 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : check for a rbd-target-api] ************************************************************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:212 Wednesday 17 April 2019 11:25:55 +0000 (0:00:00.112) 0:02:22.393 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-handler : check for a rbd-target-gw] ************************************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:221 Wednesday 17 April 2019 11:25:56 +0000 (0:00:00.120) 0:02:22.513 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: 
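The ceph-handler checks above repeat one pattern per daemon type (mgr, rbd-mirror, nfs-ganesha, tcmu-runner, rbd-target-api/gw): look for the daemon's admin socket, test whether a live process still holds it, and remove a stale socket so the restart handlers later in the run do not act on a dead daemon. All of them are skipped here because these four hosts only carry the osd role. A minimal illustrative sketch of that pattern, assuming a mgr socket and register names chosen for this example (not the role's exact task code):

  - name: check for a ceph mgr socket
    shell: stat --printf=%n /var/run/ceph/ceph-mgr*.asok
    changed_when: false
    failed_when: false
    register: mgr_socket

  - name: check if the ceph mgr socket is in-use
    command: fuser --verbose {{ mgr_socket.stdout }}
    changed_when: false
    failed_when: false
    register: mgr_socket_in_use
    when: mgr_socket.rc == 0

  - name: remove ceph mgr socket if it exists and is not used by a process
    file:
      path: "{{ mgr_socket.stdout }}"
      state: absent
    when:
      - mgr_socket.rc == 0
      - mgr_socket_in_use.rc != 0
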
[e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-common : include_tasks installs/install_on_redhat.yml] ******************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/main.yml:2 Wednesday 17 April 2019 11:25:56 +0000 (0:00:00.112) 0:02:22.625 ******* included: /usr/share/ceph-ansible/roles/ceph-common/tasks/installs/install_on_redhat.yml for e23-h05-740xd.alias.bos.scalelab.redhat.com, e24-h05-740xd.alias.bos.scalelab.redhat.com, e24-h07-740xd.alias.bos.scalelab.redhat.com, e23-h07-740xd.alias.bos.scalelab.redhat.com TASK [ceph-common : include configure_redhat_repository_installation.yml] *********************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/installs/install_on_redhat.yml:2 Wednesday 17 April 2019 11:25:56 +0000 (0:00:00.199) 0:02:22.825 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-common : include configure_redhat_local_installation.yml] **************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/installs/install_on_redhat.yml:7 Wednesday 17 April 2019 11:25:56 +0000 (0:00:00.114) 0:02:22.940 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-common : include install_redhat_packages.yml] **************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/installs/install_on_redhat.yml:12 Wednesday 17 April 2019 11:25:56 +0000 (0:00:00.111) 0:02:23.051 ******* included: /usr/share/ceph-ansible/roles/ceph-common/tasks/installs/install_redhat_packages.yml for e23-h05-740xd.alias.bos.scalelab.redhat.com, e24-h05-740xd.alias.bos.scalelab.redhat.com, e24-h07-740xd.alias.bos.scalelab.redhat.com, e23-h07-740xd.alias.bos.scalelab.redhat.com TASK [ceph-common : install redhat dependencies] ************************************************************************************************************************************************************************************************************ task path: 
/usr/share/ceph-ansible/roles/ceph-common/tasks/installs/install_redhat_packages.yml:2 Wednesday 17 April 2019 11:25:56 +0000 (0:00:00.193) 0:02:23.244 ******* ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false attempts: 1 msg: Nothing to do rc: 0 results: - 'Installed: python3-pycurl' - 'Installed: python3-setuptools' ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false attempts: 1 msg: Nothing to do rc: 0 results: - 'Installed: python3-pycurl' - 'Installed: python3-setuptools' ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false attempts: 1 msg: Nothing to do rc: 0 results: - 'Installed: python3-pycurl' - 'Installed: python3-setuptools' ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false attempts: 1 msg: Nothing to do rc: 0 results: - 'Installed: python3-pycurl' - 'Installed: python3-setuptools' TASK [ceph-common : install centos dependencies] ************************************************************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/installs/install_redhat_packages.yml:11 Wednesday 17 April 2019 11:26:04 +0000 (0:00:07.236) 0:02:30.481 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-common : install redhat ceph packages] *********************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/installs/install_redhat_packages.yml:20 Wednesday 17 April 2019 11:26:04 +0000 (0:00:00.112) 0:02:30.594 ******* changed: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=true attempts: 1 msg: '' rc: 0 results: - 'Installed: ceph-common' - 'Installed: ceph-osd' - 'Installed: libradosstriper1-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: librbd1-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: librgw2-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: python3-cephfs-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: python3-rados-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: python3-rbd-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: ceph-osd-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: librados2-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: python3-rgw-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: ceph-selinux-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: libcephfs2-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: ceph-base-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: ceph-common-2:14.2.0-142.g2f9c072.el8cp.x86_64' changed: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=true attempts: 1 msg: '' rc: 0 results: - 'Installed: ceph-common' - 'Installed: ceph-osd' - 'Installed: librbd1-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: librados2-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: librgw2-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: python3-cephfs-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: 
python3-rados-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: python3-rbd-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: ceph-osd-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: python3-rgw-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: ceph-selinux-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: libcephfs2-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: ceph-base-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: ceph-common-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: libradosstriper1-2:14.2.0-142.g2f9c072.el8cp.x86_64' changed: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=true attempts: 1 msg: '' rc: 0 results: - 'Installed: ceph-common' - 'Installed: ceph-osd' - 'Installed: libradosstriper1-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: librbd1-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: librgw2-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: python3-cephfs-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: python3-rados-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: python3-rbd-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: ceph-osd-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: librados2-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: python3-rgw-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: ceph-selinux-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: libcephfs2-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: ceph-base-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: ceph-common-2:14.2.0-142.g2f9c072.el8cp.x86_64' changed: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=true attempts: 1 msg: '' rc: 0 results: - 'Installed: ceph-common' - 'Installed: ceph-osd' - 'Installed: libradosstriper1-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: librbd1-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: librgw2-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: python3-cephfs-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: python3-rados-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: python3-rbd-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: ceph-osd-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: librados2-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: python3-rgw-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: ceph-selinux-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: libcephfs2-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: ceph-base-2:14.2.0-142.g2f9c072.el8cp.x86_64' - 'Installed: ceph-common-2:14.2.0-142.g2f9c072.el8cp.x86_64' TASK [ceph-common : include_tasks installs/install_on_suse.yml] ********************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/main.yml:8 Wednesday 17 April 2019 11:26:22 +0000 (0:00:18.785) 0:02:49.380 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-common : include installs/install_on_debian.yml] 
************************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/main.yml:14 Wednesday 17 April 2019 11:26:23 +0000 (0:00:00.111) 0:02:49.492 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-common : include_tasks installs/install_on_clear.yml] ******************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/main.yml:21 Wednesday 17 April 2019 11:26:23 +0000 (0:00:00.111) 0:02:49.604 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-common : get ceph version] *********************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/main.yml:27 Wednesday 17 April 2019 11:26:23 +0000 (0:00:00.113) 0:02:49.717 ******* ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false cmd: - ceph - --version delta: '0:00:00.089447' end: '2019-04-17 11:26:23.530246' rc: 0 start: '2019-04-17 11:26:23.440799' stderr: '' stderr_lines: [] stdout: ceph version 14.2.0-142-g2f9c072 (2f9c0720b5aed4c9e25e8b050e71856df0a986ad) nautilus (stable) stdout_lines: ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false cmd: - ceph - --version delta: '0:00:00.092568' end: '2019-04-17 11:26:23.568039' rc: 0 start: '2019-04-17 11:26:23.475471' stderr: '' stderr_lines: [] stdout: ceph version 14.2.0-142-g2f9c072 (2f9c0720b5aed4c9e25e8b050e71856df0a986ad) nautilus (stable) stdout_lines: ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false cmd: - ceph - --version delta: '0:00:00.093977' end: '2019-04-17 11:26:23.642411' rc: 0 start: '2019-04-17 11:26:23.548434' stderr: '' stderr_lines: [] stdout: ceph version 14.2.0-142-g2f9c072 (2f9c0720b5aed4c9e25e8b050e71856df0a986ad) nautilus (stable) stdout_lines: ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false cmd: - ceph - --version delta: '0:00:00.094707' end: '2019-04-17 11:26:23.671384' rc: 0 start: '2019-04-17 11:26:23.576677' stderr: '' stderr_lines: [] stdout: ceph version 14.2.0-142-g2f9c072 (2f9c0720b5aed4c9e25e8b050e71856df0a986ad) nautilus (stable) stdout_lines: TASK [ceph-common : set_fact ceph_version] 
****************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/main.yml:33 Wednesday 17 April 2019 11:26:23 +0000 (0:00:00.405) 0:02:50.123 ******* ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ceph_version: 14.2.0-142-g2f9c072 ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ceph_version: 14.2.0-142-g2f9c072 ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ceph_version: 14.2.0-142-g2f9c072 ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ceph_version: 14.2.0-142-g2f9c072 TASK [ceph-common : include release-rhcs.yml] *************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/main.yml:38 Wednesday 17 April 2019 11:26:23 +0000 (0:00:00.124) 0:02:50.248 ******* included: /usr/share/ceph-ansible/roles/ceph-common/tasks/release-rhcs.yml for e23-h05-740xd.alias.bos.scalelab.redhat.com, e24-h05-740xd.alias.bos.scalelab.redhat.com, e24-h07-740xd.alias.bos.scalelab.redhat.com, e23-h07-740xd.alias.bos.scalelab.redhat.com TASK [ceph-common : set_fact ceph_release jewel] ************************************************************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/release-rhcs.yml:2 Wednesday 17 April 2019 11:26:24 +0000 (0:00:00.201) 0:02:50.449 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-common : set_fact ceph_release kraken] *********************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/release-rhcs.yml:8 Wednesday 17 April 2019 11:26:24 +0000 (0:00:00.112) 0:02:50.561 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-common : set_fact ceph_release luminous] ********************************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/release-rhcs.yml:14 Wednesday 17 
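The "get ceph version" task runs ceph --version on every host, and the set_fact that follows keeps only the third whitespace-separated field of the output ("ceph version 14.2.0-142-g2f9c072 (...) nautilus (stable)" becomes 14.2.0-142-g2f9c072); release-rhcs.yml then maps that version onto a release name, which is why only the nautilus branch below matches. A sketch of that fact derivation, with the register name (ceph_version_out) chosen here for illustration:

  - name: get ceph version
    command: ceph --version
    changed_when: false
    register: ceph_version_out

  - name: set_fact ceph_version
    set_fact:
      ceph_version: "{{ ceph_version_out.stdout.split(' ')[2] }}"
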
April 2019 11:26:24 +0000 (0:00:00.111) 0:02:50.673 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-common : set_fact ceph_release mimic] ************************************************************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/release-rhcs.yml:20 Wednesday 17 April 2019 11:26:24 +0000 (0:00:00.111) 0:02:50.785 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-common : set_fact ceph_release nautilus] ********************************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/release-rhcs.yml:26 Wednesday 17 April 2019 11:26:24 +0000 (0:00:00.112) 0:02:50.897 ******* ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ceph_release: nautilus ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ceph_release: nautilus ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ceph_release: nautilus ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ceph_release: nautilus TASK [ceph-common : set_fact ceph_release - override ceph_release with ceph_stable_release] ***************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/main.yml:47 Wednesday 17 April 2019 11:26:24 +0000 (0:00:00.180) 0:02:51.077 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-common : include create_rbd_client_dir.yml] ****************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/main.yml:56 Wednesday 17 April 2019 11:26:24 +0000 (0:00:00.110) 0:02:51.187 ******* included: /usr/share/ceph-ansible/roles/ceph-common/tasks/create_rbd_client_dir.yml 
for e23-h05-740xd.alias.bos.scalelab.redhat.com, e24-h05-740xd.alias.bos.scalelab.redhat.com, e24-h07-740xd.alias.bos.scalelab.redhat.com, e23-h07-740xd.alias.bos.scalelab.redhat.com TASK [ceph-common : create rbd client directory] ************************************************************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/create_rbd_client_dir.yml:2 Wednesday 17 April 2019 11:26:25 +0000 (0:00:00.240) 0:02:51.427 ******* changed: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item=/var/run/ceph) => changed=true gid: 167 group: ceph item: /var/run/ceph mode: '0770' owner: ceph path: /var/run/ceph size: 40 state: directory uid: 167 changed: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item=/var/run/ceph) => changed=true gid: 167 group: ceph item: /var/run/ceph mode: '0770' owner: ceph path: /var/run/ceph size: 100 state: directory uid: 167 changed: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item=/var/run/ceph) => changed=true gid: 167 group: ceph item: /var/run/ceph mode: '0770' owner: ceph path: /var/run/ceph size: 40 state: directory uid: 167 changed: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item=/var/run/ceph) => changed=true gid: 167 group: ceph item: /var/run/ceph mode: '0770' owner: ceph path: /var/run/ceph size: 40 state: directory uid: 167 changed: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item=/var/log/ceph) => changed=true gid: 167 group: ceph item: /var/log/ceph mode: '0770' owner: ceph path: /var/log/ceph size: 6 state: directory uid: 167 changed: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item=/var/log/ceph) => changed=true gid: 167 group: ceph item: /var/log/ceph mode: '0770' owner: ceph path: /var/log/ceph size: 6 state: directory uid: 167 changed: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item=/var/log/ceph) => changed=true gid: 167 group: ceph item: /var/log/ceph mode: '0770' owner: ceph path: /var/log/ceph size: 6 state: directory uid: 167 changed: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item=/var/log/ceph) => changed=true gid: 167 group: ceph item: /var/log/ceph mode: '0770' owner: ceph path: /var/log/ceph size: 6 state: directory uid: 167 TASK [ceph-common : include configure_cluster_name.yml] ***************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/main.yml:59 Wednesday 17 April 2019 11:26:25 +0000 (0:00:00.388) 0:02:51.816 ******* included: /usr/share/ceph-ansible/roles/ceph-common/tasks/configure_cluster_name.yml for e23-h05-740xd.alias.bos.scalelab.redhat.com, e24-h05-740xd.alias.bos.scalelab.redhat.com, e24-h07-740xd.alias.bos.scalelab.redhat.com, e23-h07-740xd.alias.bos.scalelab.redhat.com TASK [ceph-common : configure cluster name] ***************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/configure_cluster_name.yml:2 Wednesday 17 April 2019 11:26:25 +0000 (0:00:00.203) 0:02:52.020 ******* changed: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=true backup: '' msg: line added changed: 
[e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=true backup: '' msg: line added changed: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=true backup: '' msg: line added changed: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=true backup: '' msg: line added TASK [ceph-common : check /etc/default/ceph exist] ********************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/configure_cluster_name.yml:24 Wednesday 17 April 2019 11:26:25 +0000 (0:00:00.253) 0:02:52.274 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-common : when /etc/default/ceph is not dir] ****************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/configure_cluster_name.yml:32 Wednesday 17 April 2019 11:26:25 +0000 (0:00:00.111) 0:02:52.385 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-common : when /etc/default/ceph is dir] ********************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/configure_cluster_name.yml:42 Wednesday 17 April 2019 11:26:26 +0000 (0:00:00.112) 0:02:52.498 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-common : include configure_memory_allocator.yml] ************************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-common/tasks/main.yml:62 Wednesday 17 April 2019 11:26:26 +0000 (0:00:00.113) 0:02:52.611 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: 
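The "configure cluster name" task reports "line added" on all four hosts; on a Red Hat family install this is typically a lineinfile edit that pins CLUSTER=<cluster name> in /etc/sysconfig/ceph, while the /etc/default/ceph checks that follow (a Debian-style layout) are skipped. A rough sketch under that assumption (the file path and variable name are illustrative, not copied from the role):

  - name: configure cluster name
    lineinfile:
      dest: /etc/sysconfig/ceph
      regexp: '^CLUSTER='
      line: "CLUSTER={{ cluster | default('ceph') }}"
      create: yes
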
[e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-config : include create_ceph_initial_dirs.yml] *************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-config/tasks/main.yml:2 Wednesday 17 April 2019 11:26:26 +0000 (0:00:00.113) 0:02:52.725 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-config : count number of osds for ceph-disk scenarios] ******************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-config/tasks/main.yml:8 Wednesday 17 April 2019 11:26:26 +0000 (0:00:00.110) 0:02:52.835 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-config : count number of osds for lvm scenario] ************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-config/tasks/main.yml:15 Wednesday 17 April 2019 11:26:26 +0000 (0:00:00.114) 0:02:52.949 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-config : run 'ceph-volume lvm batch --report' to see how many osds are to be created] ************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-config/tasks/main.yml:22 Wednesday 17 April 2019 11:26:26 +0000 (0:00:00.121) 0:02:53.070 ******* changed: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=true cmd: - ceph-volume - --cluster - ceph - lvm - batch - --bluestore - --yes - --osds-per-device - '2' - /dev/nvme0n1 - /dev/nvme1n1 - /dev/nvme2n1 - /dev/nvme3n1 - /dev/nvme4n1 - --report - --format=json delta: '0:00:00.717780' end: '2019-04-17 11:26:27.592829' rc: 0 start: '2019-04-17 
11:26:26.875049' stderr: '' stderr_lines: [] stdout: |- { "changed": true, "osds": [ { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme0n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme0n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme1n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme1n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme2n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme2n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme3n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme3n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme4n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme4n1", "percentage": 50.0, "size": 399431958528 } } ], "vgs": [] } stdout_lines: changed: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=true cmd: - ceph-volume - --cluster - ceph - lvm - batch - --bluestore - --yes - --osds-per-device - '2' - /dev/nvme0n1 - /dev/nvme1n1 - /dev/nvme2n1 - /dev/nvme3n1 - /dev/nvme4n1 - --report - --format=json delta: '0:00:00.732074' end: '2019-04-17 11:26:27.609058' rc: 0 start: '2019-04-17 11:26:26.876984' stderr: '' stderr_lines: [] stdout: |- { "changed": true, "osds": [ { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme0n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme0n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme1n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme1n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme2n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme2n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme3n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme3n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme4n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme4n1", "percentage": 50.0, "size": 399431958528 } } ], "vgs": [] } stdout_lines: changed: 
[e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=true cmd: - ceph-volume - --cluster - ceph - lvm - batch - --bluestore - --yes - --osds-per-device - '2' - /dev/nvme0n1 - /dev/nvme1n1 - /dev/nvme2n1 - /dev/nvme3n1 - /dev/nvme4n1 - --report - --format=json delta: '0:00:00.738171' end: '2019-04-17 11:26:27.612747' rc: 0 start: '2019-04-17 11:26:26.874576' stderr: '' stderr_lines: [] stdout: |- { "changed": true, "osds": [ { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme0n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme0n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme1n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme1n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme2n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme2n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme3n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme3n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme4n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme4n1", "percentage": 50.0, "size": 399431958528 } } ], "vgs": [] } stdout_lines: changed: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=true cmd: - ceph-volume - --cluster - ceph - lvm - batch - --bluestore - --yes - --osds-per-device - '2' - /dev/nvme0n1 - /dev/nvme1n1 - /dev/nvme2n1 - /dev/nvme3n1 - /dev/nvme4n1 - --report - --format=json delta: '0:00:00.760287' end: '2019-04-17 11:26:27.639071' rc: 0 start: '2019-04-17 11:26:26.878784' stderr: '' stderr_lines: [] stdout: |- { "changed": true, "osds": [ { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme0n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme0n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme1n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme1n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme2n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme2n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme3n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme3n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": 
{ "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme4n1", "percentage": 50.0, "size": 399431958528 } }, { "block.db": {}, "data": { "human_readable_size": "372.00 GB", "parts": 2, "path": "/dev/nvme4n1", "percentage": 50.0, "size": 399431958528 } } ], "vgs": [] } stdout_lines: TASK [ceph-config : set_fact num_osds from the output of 'ceph-volume lvm batch --report'] ****************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-config/tasks/main.yml:41 Wednesday 17 April 2019 11:26:27 +0000 (0:00:01.026) 0:02:54.097 ******* ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: num_osds: '10' ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: num_osds: '10' ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: num_osds: '10' ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: num_osds: '10' TASK [ceph-config : run 'ceph-volume lvm list' to see how many osds have already been created] ************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-config/tasks/main.yml:49 Wednesday 17 April 2019 11:26:27 +0000 (0:00:00.132) 0:02:54.229 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-config : set_fact num_osds from the output of 'ceph-volume lvm list'] **************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-config/tasks/main.yml:62 Wednesday 17 April 2019 11:26:27 +0000 (0:00:00.118) 0:02:54.348 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-config : create ceph conf directory] ************************************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-config/tasks/main.yml:75 Wednesday 17 April 2019 11:26:28 +0000 (0:00:00.116) 0:02:54.464 ******* changed: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=true gid: 167 group: ceph mode: '0755' owner: ceph path: /etc/ceph size: 20 state: directory uid: 167 changed: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=true gid: 167 group: ceph mode: '0755' owner: ceph path: /etc/ceph size: 20 state: directory uid: 167 changed: 
[e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=true gid: 167 group: ceph mode: '0755' owner: ceph path: /etc/ceph size: 20 state: directory uid: 167 changed: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=true gid: 167 group: ceph mode: '0755' owner: ceph path: /etc/ceph size: 20 state: directory uid: 167 TASK [ceph-config : generate ceph configuration file: ceph.conf] ******************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-config/tasks/main.yml:83 Wednesday 17 April 2019 11:26:28 +0000 (0:00:00.255) 0:02:54.720 ******* NOTIFIED HANDLER ceph-handler : set _mon_handler_called before restart for e23-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : copy mon restart script for e23-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph mon daemon(s) - non container for e23-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph mon daemon(s) - container for e23-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _mon_handler_called after restart for e23-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _osd_handler_called before restart for e23-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : copy osd restart script for e23-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph osds daemon(s) - non container for e23-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph osds daemon(s) - container for e23-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _osd_handler_called after restart for e23-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _mds_handler_called before restart for e23-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : copy mds restart script for e23-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph mds daemon(s) - non container for e23-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph mds daemon(s) - container for e23-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _mds_handler_called after restart for e23-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _rgw_handler_called before restart for e23-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : copy rgw restart script for e23-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph rgw daemon(s) - non container for e23-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph rgw daemon(s) - container for e23-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _rgw_handler_called after restart for e23-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _mgr_handler_called before restart for e23-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : copy mgr restart script for e23-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph mgr daemon(s) - non container for e23-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph mgr daemon(s) - container for e23-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler 
: set _mgr_handler_called after restart for e23-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _rbdmirror_handler_called before restart for e23-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : copy rbd mirror restart script for e23-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph rbd mirror daemon(s) - non container for e23-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph rbd mirror daemon(s) - container for e23-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _rbdmirror_handler_called after restart for e23-h05-740xd.alias.bos.scalelab.redhat.com changed: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=true checksum: 25ff6fb48c949938165b43fb55cd3755c862d04a dest: /etc/ceph/ceph.conf gid: 167 group: ceph md5sum: 2fce21117850ea21e64af0531382c211 mode: '0644' owner: ceph size: 437 src: /root/.ansible/tmp/ansible-tmp-1555500388.3381162-53229136820132/source state: file uid: 167 NOTIFIED HANDLER ceph-handler : set _mon_handler_called before restart for e24-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : copy mon restart script for e24-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph mon daemon(s) - non container for e24-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph mon daemon(s) - container for e24-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _mon_handler_called after restart for e24-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _osd_handler_called before restart for e24-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : copy osd restart script for e24-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph osds daemon(s) - non container for e24-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph osds daemon(s) - container for e24-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _osd_handler_called after restart for e24-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _mds_handler_called before restart for e24-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : copy mds restart script for e24-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph mds daemon(s) - non container for e24-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph mds daemon(s) - container for e24-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _mds_handler_called after restart for e24-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _rgw_handler_called before restart for e24-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : copy rgw restart script for e24-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph rgw daemon(s) - non container for e24-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph rgw daemon(s) - container for e24-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _rgw_handler_called after restart for e24-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _mgr_handler_called before restart for e24-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : copy mgr restart 
script for e24-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph mgr daemon(s) - non container for e24-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph mgr daemon(s) - container for e24-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _mgr_handler_called after restart for e24-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _rbdmirror_handler_called before restart for e24-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : copy rbd mirror restart script for e24-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph rbd mirror daemon(s) - non container for e24-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph rbd mirror daemon(s) - container for e24-h05-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _rbdmirror_handler_called after restart for e24-h05-740xd.alias.bos.scalelab.redhat.com changed: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=true checksum: 25ff6fb48c949938165b43fb55cd3755c862d04a dest: /etc/ceph/ceph.conf gid: 167 group: ceph md5sum: 2fce21117850ea21e64af0531382c211 mode: '0644' owner: ceph size: 437 src: /root/.ansible/tmp/ansible-tmp-1555500388.3618033-47923445293287/source state: file uid: 167 NOTIFIED HANDLER ceph-handler : set _mon_handler_called before restart for e24-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : copy mon restart script for e24-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph mon daemon(s) - non container for e24-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph mon daemon(s) - container for e24-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _mon_handler_called after restart for e24-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _osd_handler_called before restart for e24-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : copy osd restart script for e24-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph osds daemon(s) - non container for e24-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph osds daemon(s) - container for e24-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _osd_handler_called after restart for e24-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _mds_handler_called before restart for e24-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : copy mds restart script for e24-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph mds daemon(s) - non container for e24-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph mds daemon(s) - container for e24-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _mds_handler_called after restart for e24-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _rgw_handler_called before restart for e24-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : copy rgw restart script for e24-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph rgw daemon(s) - non container for e24-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph rgw daemon(s) - container for 
e24-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _rgw_handler_called after restart for e24-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _mgr_handler_called before restart for e24-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : copy mgr restart script for e24-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph mgr daemon(s) - non container for e24-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph mgr daemon(s) - container for e24-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _mgr_handler_called after restart for e24-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _rbdmirror_handler_called before restart for e24-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : copy rbd mirror restart script for e24-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph rbd mirror daemon(s) - non container for e24-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph rbd mirror daemon(s) - container for e24-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _rbdmirror_handler_called after restart for e24-h07-740xd.alias.bos.scalelab.redhat.com changed: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=true checksum: 25ff6fb48c949938165b43fb55cd3755c862d04a dest: /etc/ceph/ceph.conf gid: 167 group: ceph md5sum: 2fce21117850ea21e64af0531382c211 mode: '0644' owner: ceph size: 437 src: /root/.ansible/tmp/ansible-tmp-1555500388.3923397-274462083327427/source state: file uid: 167 NOTIFIED HANDLER ceph-handler : set _mon_handler_called before restart for e23-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : copy mon restart script for e23-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph mon daemon(s) - non container for e23-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph mon daemon(s) - container for e23-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _mon_handler_called after restart for e23-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _osd_handler_called before restart for e23-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : copy osd restart script for e23-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph osds daemon(s) - non container for e23-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph osds daemon(s) - container for e23-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _osd_handler_called after restart for e23-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _mds_handler_called before restart for e23-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : copy mds restart script for e23-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph mds daemon(s) - non container for e23-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph mds daemon(s) - container for e23-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _mds_handler_called after restart for e23-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _rgw_handler_called before restart for 
e23-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : copy rgw restart script for e23-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph rgw daemon(s) - non container for e23-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph rgw daemon(s) - container for e23-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _rgw_handler_called after restart for e23-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _mgr_handler_called before restart for e23-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : copy mgr restart script for e23-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph mgr daemon(s) - non container for e23-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph mgr daemon(s) - container for e23-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _mgr_handler_called after restart for e23-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _rbdmirror_handler_called before restart for e23-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : copy rbd mirror restart script for e23-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph rbd mirror daemon(s) - non container for e23-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : restart ceph rbd mirror daemon(s) - container for e23-h07-740xd.alias.bos.scalelab.redhat.com NOTIFIED HANDLER ceph-handler : set _rbdmirror_handler_called after restart for e23-h07-740xd.alias.bos.scalelab.redhat.com changed: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=true checksum: 25ff6fb48c949938165b43fb55cd3755c862d04a dest: /etc/ceph/ceph.conf gid: 167 group: ceph md5sum: 2fce21117850ea21e64af0531382c211 mode: '0644' owner: ceph size: 437 src: /root/.ansible/tmp/ansible-tmp-1555500388.4211795-19967754526276/source state: file uid: 167 TASK [ceph-config : ensure fetch directory exists] ********************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-config/tasks/main.yml:101 Wednesday 17 April 2019 11:26:42 +0000 (0:00:13.901) 0:03:08.622 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-config : generate ceph.conf configuration file locally] ****************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-config/tasks/main.yml:112 Wednesday 17 April 2019 11:26:42 +0000 (0:00:00.041) 0:03:08.663 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-config : create a local fetch directory if it does not exist] ************************************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-config/tasks/main.yml:129 Wednesday 17 April 2019 11:26:42 +0000 (0:00:00.041) 0:03:08.705 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false 
skip_reason: Conditional result was False TASK [ceph-config : generate ceph.conf configuration file] ************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-config/tasks/main.yml:142 Wednesday 17 April 2019 11:26:42 +0000 (0:00:00.036) 0:03:08.741 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-osd : include_tasks system_tuning.yml] *********************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-osd/tasks/main.yml:2 Wednesday 17 April 2019 11:26:42 +0000 (0:00:00.111) 0:03:08.852 ******* included: /usr/share/ceph-ansible/roles/ceph-osd/tasks/system_tuning.yml for e23-h05-740xd.alias.bos.scalelab.redhat.com, e24-h05-740xd.alias.bos.scalelab.redhat.com, e24-h07-740xd.alias.bos.scalelab.redhat.com, e23-h07-740xd.alias.bos.scalelab.redhat.com TASK [ceph-osd : disable osd directory parsing by updatedb] ************************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-osd/tasks/system_tuning.yml:4 Wednesday 17 April 2019 11:26:42 +0000 (0:00:00.200) 0:03:09.052 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-osd : disable osd directory path in updatedb.conf] *********************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-osd/tasks/system_tuning.yml:8 Wednesday 17 April 2019 11:26:42 +0000 (0:00:00.110) 0:03:09.162 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-osd : create tmpfiles.d directory] *************************************************************************************************************************************************************************************************************** task path: 
/usr/share/ceph-ansible/roles/ceph-osd/tasks/system_tuning.yml:18 Wednesday 17 April 2019 11:26:42 +0000 (0:00:00.112) 0:03:09.275 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-osd : disable transparent hugepage] ************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-osd/tasks/system_tuning.yml:29 Wednesday 17 April 2019 11:26:42 +0000 (0:00:00.110) 0:03:09.386 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-osd : get default vm.min_free_kbytes] ************************************************************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-osd/tasks/system_tuning.yml:41 Wednesday 17 April 2019 11:26:43 +0000 (0:00:00.114) 0:03:09.500 ******* ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false cmd: - sysctl - -b - vm.min_free_kbytes delta: '0:00:00.003099' end: '2019-04-17 11:26:43.286255' failed_when_result: false rc: 0 start: '2019-04-17 11:26:43.283156' stderr: '' stderr_lines: [] stdout: '4194303' stdout_lines: ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false cmd: - sysctl - -b - vm.min_free_kbytes delta: '0:00:00.003339' end: '2019-04-17 11:26:43.312713' failed_when_result: false rc: 0 start: '2019-04-17 11:26:43.309374' stderr: '' stderr_lines: [] stdout: '4194303' stdout_lines: ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false cmd: - sysctl - -b - vm.min_free_kbytes delta: '0:00:00.003404' end: '2019-04-17 11:26:43.340529' failed_when_result: false rc: 0 start: '2019-04-17 11:26:43.337125' stderr: '' stderr_lines: [] stdout: '4194303' stdout_lines: ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false cmd: - sysctl - -b - vm.min_free_kbytes delta: '0:00:00.003375' end: '2019-04-17 11:26:43.362659' failed_when_result: false rc: 0 start: '2019-04-17 11:26:43.359284' stderr: '' stderr_lines: [] stdout: '4194303' stdout_lines: TASK [ceph-osd : set_fact vm_min_free_kbytes] *************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-osd/tasks/system_tuning.yml:48 Wednesday 17 April 2019 11:26:43 +0000 (0:00:00.313) 0:03:09.814 ******* ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: vm_min_free_kbytes: '4194303' 
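For reference, the value read back above with sysctl -b vm.min_free_kbytes is 4194303 on every OSD host, and the "apply operating system tuning" task that follows re-applies it together with the other kernel settings recorded there. A minimal sketch of the equivalent manual commands on one OSD host is shown below; it is only an illustration assuming a root shell, since the playbook drives these settings through Ansible rather than by hand:

    sysctl -b vm.min_free_kbytes            # read the current value (prints 4194303 here)
    sysctl -w fs.aio-max-nr=1048576         # applied because osd_objectstore == 'bluestore'
    sysctl -w fs.file-max=26234859
    sysctl -w vm.zone_reclaim_mode=0
    sysctl -w vm.swappiness=10
    sysctl -w vm.min_free_kbytes=4194303    # re-applies the value captured above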
ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: vm_min_free_kbytes: '4194303' ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: vm_min_free_kbytes: '4194303' ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: vm_min_free_kbytes: '4194303' TASK [ceph-osd : apply operating system tuning] ************************************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-osd/tasks/system_tuning.yml:52 Wednesday 17 April 2019 11:26:43 +0000 (0:00:00.126) 0:03:09.940 ******* ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'fs.aio-max-nr', 'value': '1048576', 'enable': "(osd_objectstore == 'bluestore')"}) => changed=false item: enable: (osd_objectstore == 'bluestore') name: fs.aio-max-nr value: '1048576' ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'fs.aio-max-nr', 'value': '1048576', 'enable': "(osd_objectstore == 'bluestore')"}) => changed=false item: enable: (osd_objectstore == 'bluestore') name: fs.aio-max-nr value: '1048576' ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'fs.aio-max-nr', 'value': '1048576', 'enable': "(osd_objectstore == 'bluestore')"}) => changed=false item: enable: (osd_objectstore == 'bluestore') name: fs.aio-max-nr value: '1048576' ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'fs.aio-max-nr', 'value': '1048576', 'enable': "(osd_objectstore == 'bluestore')"}) => changed=false item: enable: (osd_objectstore == 'bluestore') name: fs.aio-max-nr value: '1048576' ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'fs.file-max', 'value': 26234859}) => changed=false item: name: fs.file-max value: 26234859 ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'fs.file-max', 'value': 26234859}) => changed=false item: name: fs.file-max value: 26234859 ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'fs.file-max', 'value': 26234859}) => changed=false item: name: fs.file-max value: 26234859 ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'fs.file-max', 'value': 26234859}) => changed=false item: name: fs.file-max value: 26234859 ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'vm.zone_reclaim_mode', 'value': 0}) => changed=false item: name: vm.zone_reclaim_mode value: 0 ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'vm.zone_reclaim_mode', 'value': 0}) => changed=false item: name: vm.zone_reclaim_mode value: 0 ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'vm.zone_reclaim_mode', 'value': 0}) => changed=false item: name: vm.zone_reclaim_mode value: 0 ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'vm.zone_reclaim_mode', 'value': 0}) => changed=false item: name: vm.zone_reclaim_mode value: 0 ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'vm.swappiness', 'value': 10}) => changed=false item: name: vm.swappiness value: 10 ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'vm.swappiness', 'value': 10}) => changed=false item: name: vm.swappiness value: 10 ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'vm.swappiness', 'value': 10}) => changed=false item: name: vm.swappiness value: 10 ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] 
=> (item={'name': 'vm.swappiness', 'value': 10}) => changed=false item: name: vm.swappiness value: 10 ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'vm.min_free_kbytes', 'value': '4194303'}) => changed=false item: name: vm.min_free_kbytes value: '4194303' ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'vm.min_free_kbytes', 'value': '4194303'}) => changed=false item: name: vm.min_free_kbytes value: '4194303' ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'vm.min_free_kbytes', 'value': '4194303'}) => changed=false item: name: vm.min_free_kbytes value: '4194303' ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'name': 'vm.min_free_kbytes', 'value': '4194303'}) => changed=false item: name: vm.min_free_kbytes value: '4194303' TASK [ceph-osd : install dependencies] ********************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-osd/tasks/main.yml:5 Wednesday 17 April 2019 11:26:44 +0000 (0:00:00.899) 0:03:10.840 ******* ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false attempts: 1 msg: Nothing to do rc: 0 results: - 'Installed: parted' ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false attempts: 1 msg: Nothing to do rc: 0 results: - 'Installed: parted' ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false attempts: 1 msg: Nothing to do rc: 0 results: - 'Installed: parted' ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false attempts: 1 msg: Nothing to do rc: 0 results: - 'Installed: parted' TASK [ceph-osd : install numactl when needed] *************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-osd/tasks/main.yml:15 Wednesday 17 April 2019 11:26:45 +0000 (0:00:00.989) 0:03:11.830 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-osd : include_tasks common.yml] ****************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-osd/tasks/main.yml:26 Wednesday 17 April 2019 11:26:45 +0000 (0:00:00.112) 0:03:11.943 ******* included: /usr/share/ceph-ansible/roles/ceph-osd/tasks/common.yml for e23-h05-740xd.alias.bos.scalelab.redhat.com, e24-h05-740xd.alias.bos.scalelab.redhat.com, e24-h07-740xd.alias.bos.scalelab.redhat.com, e23-h07-740xd.alias.bos.scalelab.redhat.com TASK [ceph-osd : create bootstrap-osd and osd directories] ************************************************************************************************************************************************************************************************** task path: 
/usr/share/ceph-ansible/roles/ceph-osd/tasks/common.yml:2 Wednesday 17 April 2019 11:26:45 +0000 (0:00:00.203) 0:03:12.146 ******* changed: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item=/var/lib/ceph/bootstrap-osd/) => changed=true gid: 167 group: ceph item: /var/lib/ceph/bootstrap-osd/ mode: '0755' owner: ceph path: /var/lib/ceph/bootstrap-osd/ size: 6 state: directory uid: 167 changed: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item=/var/lib/ceph/bootstrap-osd/) => changed=true gid: 167 group: ceph item: /var/lib/ceph/bootstrap-osd/ mode: '0755' owner: ceph path: /var/lib/ceph/bootstrap-osd/ size: 6 state: directory uid: 167 changed: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item=/var/lib/ceph/bootstrap-osd/) => changed=true gid: 167 group: ceph item: /var/lib/ceph/bootstrap-osd/ mode: '0755' owner: ceph path: /var/lib/ceph/bootstrap-osd/ size: 6 state: directory uid: 167 changed: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item=/var/lib/ceph/bootstrap-osd/) => changed=true gid: 167 group: ceph item: /var/lib/ceph/bootstrap-osd/ mode: '0755' owner: ceph path: /var/lib/ceph/bootstrap-osd/ size: 6 state: directory uid: 167 changed: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item=/var/lib/ceph/osd/) => changed=true gid: 167 group: ceph item: /var/lib/ceph/osd/ mode: '0755' owner: ceph path: /var/lib/ceph/osd/ size: 6 state: directory uid: 167 changed: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item=/var/lib/ceph/osd/) => changed=true gid: 167 group: ceph item: /var/lib/ceph/osd/ mode: '0755' owner: ceph path: /var/lib/ceph/osd/ size: 6 state: directory uid: 167 changed: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item=/var/lib/ceph/osd/) => changed=true gid: 167 group: ceph item: /var/lib/ceph/osd/ mode: '0755' owner: ceph path: /var/lib/ceph/osd/ size: 6 state: directory uid: 167 changed: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item=/var/lib/ceph/osd/) => changed=true gid: 167 group: ceph item: /var/lib/ceph/osd/ mode: '0755' owner: ceph path: /var/lib/ceph/osd/ size: 6 state: directory uid: 167 TASK [ceph-osd : copy ceph key(s) if needed] **************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-osd/tasks/common.yml:15 Wednesday 17 April 2019 11:26:46 +0000 (0:00:00.443) 0:03:12.590 ******* changed: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'name': '/var/lib/ceph/bootstrap-osd/ceph.keyring', 'copy_key': True}) => changed=true checksum: c94775b0ecfd59af4634e3227a4a45be77f52094 dest: /var/lib/ceph/bootstrap-osd/ceph.keyring gid: 167 group: ceph item: copy_key: true name: /var/lib/ceph/bootstrap-osd/ceph.keyring md5sum: 24189cb6ab33593b962368311e70954c mode: '0600' owner: ceph size: 113 src: /root/.ansible/tmp/ansible-tmp-1555500406.209846-24481991501845/source state: file uid: 167 skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'name': '/etc/ceph/ceph.client.admin.keyring', 'copy_key': False}) => changed=false item: copy_key: false name: /etc/ceph/ceph.client.admin.keyring skip_reason: Conditional result was False changed: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'name': '/var/lib/ceph/bootstrap-osd/ceph.keyring', 'copy_key': True}) => changed=true checksum: c94775b0ecfd59af4634e3227a4a45be77f52094 dest: /var/lib/ceph/bootstrap-osd/ceph.keyring gid: 167 group: ceph item: copy_key: true name: 
/var/lib/ceph/bootstrap-osd/ceph.keyring md5sum: 24189cb6ab33593b962368311e70954c mode: '0600' owner: ceph size: 113 src: /root/.ansible/tmp/ansible-tmp-1555500406.2347102-102737878855586/source state: file uid: 167 skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item={'name': '/etc/ceph/ceph.client.admin.keyring', 'copy_key': False}) => changed=false item: copy_key: false name: /etc/ceph/ceph.client.admin.keyring skip_reason: Conditional result was False changed: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'name': '/var/lib/ceph/bootstrap-osd/ceph.keyring', 'copy_key': True}) => changed=true checksum: c94775b0ecfd59af4634e3227a4a45be77f52094 dest: /var/lib/ceph/bootstrap-osd/ceph.keyring gid: 167 group: ceph item: copy_key: true name: /var/lib/ceph/bootstrap-osd/ceph.keyring md5sum: 24189cb6ab33593b962368311e70954c mode: '0600' owner: ceph size: 113 src: /root/.ansible/tmp/ansible-tmp-1555500406.2657592-269963664177482/source state: file uid: 167 skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'name': '/etc/ceph/ceph.client.admin.keyring', 'copy_key': False}) => changed=false item: copy_key: false name: /etc/ceph/ceph.client.admin.keyring skip_reason: Conditional result was False changed: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'name': '/var/lib/ceph/bootstrap-osd/ceph.keyring', 'copy_key': True}) => changed=true checksum: c94775b0ecfd59af4634e3227a4a45be77f52094 dest: /var/lib/ceph/bootstrap-osd/ceph.keyring gid: 167 group: ceph item: copy_key: true name: /var/lib/ceph/bootstrap-osd/ceph.keyring md5sum: 24189cb6ab33593b962368311e70954c mode: '0600' owner: ceph size: 113 src: /root/.ansible/tmp/ansible-tmp-1555500406.295119-46017929097729/source state: file uid: 167 skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item={'name': '/etc/ceph/ceph.client.admin.keyring', 'copy_key': False}) => changed=false item: copy_key: false name: /etc/ceph/ceph.client.admin.keyring skip_reason: Conditional result was False TASK [ceph-osd : include ceph_disk_cli_options_facts.yml] *************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-osd/tasks/main.yml:29 Wednesday 17 April 2019 11:26:46 +0000 (0:00:00.495) 0:03:13.085 ******* included: /usr/share/ceph-ansible/roles/ceph-osd/tasks/ceph_disk_cli_options_facts.yml for e23-h05-740xd.alias.bos.scalelab.redhat.com, e24-h05-740xd.alias.bos.scalelab.redhat.com, e24-h07-740xd.alias.bos.scalelab.redhat.com, e23-h07-740xd.alias.bos.scalelab.redhat.com TASK [ceph-osd : set_fact ceph_disk_cli_options '--cluster ceph --bluestore'] ******************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-osd/tasks/ceph_disk_cli_options_facts.yml:2 Wednesday 17 April 2019 11:26:46 +0000 (0:00:00.210) 0:03:13.296 ******* ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ceph_disk_cli_options: --cluster ceph --bluestore ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ceph_disk_cli_options: --cluster ceph --bluestore ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false ansible_facts: ceph_disk_cli_options: --cluster ceph --bluestore ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false 
ansible_facts: ceph_disk_cli_options: --cluster ceph --bluestore TASK [ceph-osd : set_fact ceph_disk_cli_options 'ceph_disk_cli_options'] ************************************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-osd/tasks/ceph_disk_cli_options_facts.yml:10 Wednesday 17 April 2019 11:26:47 +0000 (0:00:00.133) 0:03:13.429 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-osd : set_fact ceph_disk_cli_options '--cluster ceph --bluestore --dmcrypt'] ********************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-osd/tasks/ceph_disk_cli_options_facts.yml:18 Wednesday 17 April 2019 11:26:47 +0000 (0:00:00.115) 0:03:13.545 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-osd : set_fact ceph_disk_cli_options '--cluster ceph --filestore --dmcrypt'] ********************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-osd/tasks/ceph_disk_cli_options_facts.yml:26 Wednesday 17 April 2019 11:26:47 +0000 (0:00:00.114) 0:03:13.660 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-osd : set_fact docker_env_args '-e OSD_BLUESTORE=0 -e OSD_FILESTORE=1 -e OSD_DMCRYPT=0'] ********************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-osd/tasks/ceph_disk_cli_options_facts.yml:34 Wednesday 17 April 2019 11:26:47 +0000 (0:00:00.111) 0:03:13.771 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: 
Conditional result was False TASK [ceph-osd : set_fact docker_env_args '-e OSD_BLUESTORE=0 -e OSD_FILESTORE=1 -e OSD_DMCRYPT=1'] ********************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-osd/tasks/ceph_disk_cli_options_facts.yml:42 Wednesday 17 April 2019 11:26:47 +0000 (0:00:00.109) 0:03:13.881 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-osd : set_fact docker_env_args '-e OSD_BLUESTORE=1 -e OSD_FILESTORE=0 -e OSD_DMCRYPT=0'] ********************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-osd/tasks/ceph_disk_cli_options_facts.yml:50 Wednesday 17 April 2019 11:26:47 +0000 (0:00:00.113) 0:03:13.994 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-osd : set_fact docker_env_args '-e OSD_BLUESTORE=1 -e OSD_FILESTORE=0 -e OSD_DMCRYPT=1'] ********************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-osd/tasks/ceph_disk_cli_options_facts.yml:58 Wednesday 17 April 2019 11:26:47 +0000 (0:00:00.109) 0:03:14.104 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-osd : include build_devices.yml] ***************************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-osd/tasks/main.yml:32 Wednesday 17 April 2019 11:26:47 +0000 (0:00:00.109) 0:03:14.213 ******* included: /usr/share/ceph-ansible/roles/ceph-osd/tasks/build_devices.yml for e23-h05-740xd.alias.bos.scalelab.redhat.com, e24-h05-740xd.alias.bos.scalelab.redhat.com, e24-h07-740xd.alias.bos.scalelab.redhat.com, e23-h07-740xd.alias.bos.scalelab.redhat.com TASK [ceph-osd : resolve dedicated device link(s)] 
********************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-osd/tasks/build_devices.yml:2 Wednesday 17 April 2019 11:26:48 +0000 (0:00:00.209) 0:03:14.423 ******* TASK [ceph-osd : set_fact build dedicated_devices from resolved symlinks] *********************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-osd/tasks/build_devices.yml:11 Wednesday 17 April 2019 11:26:48 +0000 (0:00:00.108) 0:03:14.532 ******* TASK [ceph-osd : set_fact build final dedicated_devices list] *********************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-osd/tasks/build_devices.yml:19 Wednesday 17 April 2019 11:26:48 +0000 (0:00:00.110) 0:03:14.643 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-osd : read information about the devices] ******************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-osd/tasks/main.yml:35 Wednesday 17 April 2019 11:26:48 +0000 (0:00:00.111) 0:03:14.754 ******* ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme0n1) => changed=false disk: dev: /dev/nvme0n1 logical_block: 512 model: NVMe Device physical_block: 512 size: 763098.0 table: unknown unit: mib item: /dev/nvme0n1 partitions: [] script: unit 'MiB' print ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme0n1) => changed=false disk: dev: /dev/nvme0n1 logical_block: 512 model: NVMe Device physical_block: 512 size: 763098.0 table: unknown unit: mib item: /dev/nvme0n1 partitions: [] script: unit 'MiB' print ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme0n1) => changed=false disk: dev: /dev/nvme0n1 logical_block: 512 model: NVMe Device physical_block: 512 size: 763098.0 table: unknown unit: mib item: /dev/nvme0n1 partitions: [] script: unit 'MiB' print ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme0n1) => changed=false disk: dev: /dev/nvme0n1 logical_block: 512 model: NVMe Device physical_block: 512 size: 763098.0 table: unknown unit: mib item: /dev/nvme0n1 partitions: [] script: unit 'MiB' print ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme1n1) => changed=false disk: dev: /dev/nvme1n1 logical_block: 512 model: NVMe Device physical_block: 512 size: 763098.0 table: unknown unit: mib item: /dev/nvme1n1 partitions: [] script: unit 'MiB' print ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme1n1) => changed=false disk: dev: /dev/nvme1n1 logical_block: 512 model: NVMe Device physical_block: 512 size: 
763098.0 table: unknown unit: mib item: /dev/nvme1n1 partitions: [] script: unit 'MiB' print ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme1n1) => changed=false disk: dev: /dev/nvme1n1 logical_block: 512 model: NVMe Device physical_block: 512 size: 763098.0 table: unknown unit: mib item: /dev/nvme1n1 partitions: [] script: unit 'MiB' print ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme1n1) => changed=false disk: dev: /dev/nvme1n1 logical_block: 512 model: NVMe Device physical_block: 512 size: 763098.0 table: unknown unit: mib item: /dev/nvme1n1 partitions: [] script: unit 'MiB' print ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme2n1) => changed=false disk: dev: /dev/nvme2n1 logical_block: 512 model: NVMe Device physical_block: 512 size: 763098.0 table: unknown unit: mib item: /dev/nvme2n1 partitions: [] script: unit 'MiB' print ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme2n1) => changed=false disk: dev: /dev/nvme2n1 logical_block: 512 model: NVMe Device physical_block: 512 size: 763098.0 table: unknown unit: mib item: /dev/nvme2n1 partitions: [] script: unit 'MiB' print ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme2n1) => changed=false disk: dev: /dev/nvme2n1 logical_block: 512 model: NVMe Device physical_block: 512 size: 763098.0 table: unknown unit: mib item: /dev/nvme2n1 partitions: [] script: unit 'MiB' print ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme2n1) => changed=false disk: dev: /dev/nvme2n1 logical_block: 512 model: NVMe Device physical_block: 512 size: 763098.0 table: unknown unit: mib item: /dev/nvme2n1 partitions: [] script: unit 'MiB' print ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme3n1) => changed=false disk: dev: /dev/nvme3n1 logical_block: 512 model: NVMe Device physical_block: 512 size: 763098.0 table: unknown unit: mib item: /dev/nvme3n1 partitions: [] script: unit 'MiB' print ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme3n1) => changed=false disk: dev: /dev/nvme3n1 logical_block: 512 model: NVMe Device physical_block: 512 size: 763098.0 table: unknown unit: mib item: /dev/nvme3n1 partitions: [] script: unit 'MiB' print ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme3n1) => changed=false disk: dev: /dev/nvme3n1 logical_block: 512 model: NVMe Device physical_block: 512 size: 763098.0 table: unknown unit: mib item: /dev/nvme3n1 partitions: [] script: unit 'MiB' print ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme3n1) => changed=false disk: dev: /dev/nvme3n1 logical_block: 512 model: NVMe Device physical_block: 512 size: 763098.0 table: unknown unit: mib item: /dev/nvme3n1 partitions: [] script: unit 'MiB' print ok: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme4n1) => changed=false disk: dev: /dev/nvme4n1 logical_block: 512 model: NVMe Device physical_block: 512 size: 763098.0 table: unknown unit: mib item: /dev/nvme4n1 partitions: [] script: unit 'MiB' print ok: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme4n1) => changed=false disk: dev: /dev/nvme4n1 logical_block: 512 model: NVMe Device physical_block: 512 size: 763098.0 table: unknown unit: mib item: /dev/nvme4n1 partitions: [] script: unit 'MiB' print ok: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme4n1) => changed=false disk: dev: /dev/nvme4n1 logical_block: 512 model: NVMe Device physical_block: 512 size: 763098.0 table: unknown unit: 
mib item: /dev/nvme4n1 partitions: [] script: unit 'MiB' print ok: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => (item=/dev/nvme4n1) => changed=false disk: dev: /dev/nvme4n1 logical_block: 512 model: NVMe Device physical_block: 512 size: 763098.0 table: unknown unit: mib item: /dev/nvme4n1 partitions: [] script: unit 'MiB' print TASK [ceph-osd : include check_gpt.yml] ********************************************************************************************************************************************************************************************************************* task path: /usr/share/ceph-ansible/roles/ceph-osd/tasks/main.yml:42 Wednesday 17 April 2019 11:26:49 +0000 (0:00:00.914) 0:03:15.668 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-osd : include_tasks scenarios/collocated.yml] **************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-osd/tasks/main.yml:47 Wednesday 17 April 2019 11:26:49 +0000 (0:00:00.114) 0:03:15.783 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-osd : include_tasks scenarios/non-collocated.yml] ************************************************************************************************************************************************************************************************ task path: /usr/share/ceph-ansible/roles/ceph-osd/tasks/main.yml:52 Wednesday 17 April 2019 11:26:49 +0000 (0:00:00.111) 0:03:15.894 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-osd : include_tasks scenarios/lvm.yml] *********************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-osd/tasks/main.yml:58 Wednesday 17 April 2019 11:26:49 +0000 (0:00:00.114) 0:03:16.009 ******* skipping: [e23-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e24-h05-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: 
[e24-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False skipping: [e23-h07-740xd.alias.bos.scalelab.redhat.com] => changed=false skip_reason: Conditional result was False TASK [ceph-osd : include_tasks scenarios/lvm-batch.yml] ***************************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-osd/tasks/main.yml:66 Wednesday 17 April 2019 11:26:49 +0000 (0:00:00.112) 0:03:16.122 ******* included: /usr/share/ceph-ansible/roles/ceph-osd/tasks/scenarios/lvm-batch.yml for e23-h05-740xd.alias.bos.scalelab.redhat.com, e24-h05-740xd.alias.bos.scalelab.redhat.com, e24-h07-740xd.alias.bos.scalelab.redhat.com, e23-h07-740xd.alias.bos.scalelab.redhat.com TASK [ceph-osd : use ceph-volume lvm batch to create bluestore osds] **************************************************************************************************************************************************************************************** task path: /usr/share/ceph-ansible/roles/ceph-osd/tasks/scenarios/lvm-batch.yml:3 Wednesday 17 April 2019 11:26:49 +0000 (0:00:00.220) 0:03:16.343 ******* fatal: [e23-h05-740xd.alias.bos.scalelab.redhat.com]: FAILED! => changed=true cmd: - ceph-volume - --cluster - ceph - lvm - batch - --bluestore - --yes - --osds-per-device - '2' - /dev/nvme0n1 - /dev/nvme1n1 - /dev/nvme2n1 - /dev/nvme3n1 - /dev/nvme4n1 delta: '0:00:04.649959' end: '2019-04-17 11:26:54.722181' msg: non-zero return code rc: 1 start: '2019-04-17 11:26:50.072222' stderr: |- Traceback (most recent call last): File "/sbin/ceph-volume", line 11, in load_entry_point('ceph-volume==1.0.0', 'console_scripts', 'ceph-volume')() File "/usr/lib/python3.6/site-packages/ceph_volume/main.py", line 38, in __init__ self.main(self.argv) File "/usr/lib/python3.6/site-packages/ceph_volume/decorators.py", line 59, in newfunc return f(*a, **kw) File "/usr/lib/python3.6/site-packages/ceph_volume/main.py", line 148, in main terminal.dispatch(self.mapper, subcommand_args) File "/usr/lib/python3.6/site-packages/ceph_volume/terminal.py", line 182, in dispatch instance.main() File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/main.py", line 40, in main terminal.dispatch(self.mapper, self.argv) File "/usr/lib/python3.6/site-packages/ceph_volume/terminal.py", line 182, in dispatch instance.main() File "/usr/lib/python3.6/site-packages/ceph_volume/decorators.py", line 16, in is_root return func(*a, **kw) File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/batch.py", line 325, in main self.execute() File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/batch.py", line 288, in execute self.strategy.execute() File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/strategies/bluestore.py", line 124, in execute Create(command).main() File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/create.py", line 69, in main self.create(args) File "/usr/lib/python3.6/site-packages/ceph_volume/decorators.py", line 16, in is_root return func(*a, **kw) File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/create.py", line 26, in create prepare_step.safe_prepare(args) File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/prepare.py", line 219, in safe_prepare self.prepare() File "/usr/lib/python3.6/site-packages/ceph_volume/decorators.py", line 16, in is_root return func(*a, **kw) File 
"/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/prepare.py", line 320, in prepare osd_fsid, File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/prepare.py", line 119, in prepare_bluestore db=db File "/usr/lib/python3.6/site-packages/ceph_volume/util/prepare.py", line 430, in osd_mkfs_bluestore raise RuntimeError('Command failed with exit code %s: %s' % (returncode, ' '.join(command))) RuntimeError: Command failed with exit code 250: /bin/ceph-osd --cluster ceph --osd-objectstore bluestore --mkfs -i 0 --monmap /var/lib/ceph/osd/ceph-0/activate.monmap --keyfile - --osd-data /var/lib/ceph/osd/ceph-0/ --osd-uuid 7046dd36-51c4-4a05-a38b-5d33236989a2 --setuser ceph --setgroup ceph stderr_lines: - 'Traceback (most recent call last):' - ' File "/sbin/ceph-volume", line 11, in ' - ' load_entry_point(''ceph-volume==1.0.0'', ''console_scripts'', ''ceph-volume'')()' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/main.py", line 38, in __init__' - ' self.main(self.argv)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/decorators.py", line 59, in newfunc' - ' return f(*a, **kw)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/main.py", line 148, in main' - ' terminal.dispatch(self.mapper, subcommand_args)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/terminal.py", line 182, in dispatch' - ' instance.main()' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/main.py", line 40, in main' - ' terminal.dispatch(self.mapper, self.argv)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/terminal.py", line 182, in dispatch' - ' instance.main()' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/decorators.py", line 16, in is_root' - ' return func(*a, **kw)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/batch.py", line 325, in main' - ' self.execute()' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/batch.py", line 288, in execute' - ' self.strategy.execute()' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/strategies/bluestore.py", line 124, in execute' - ' Create(command).main()' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/create.py", line 69, in main' - ' self.create(args)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/decorators.py", line 16, in is_root' - ' return func(*a, **kw)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/create.py", line 26, in create' - ' prepare_step.safe_prepare(args)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/prepare.py", line 219, in safe_prepare' - ' self.prepare()' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/decorators.py", line 16, in is_root' - ' return func(*a, **kw)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/prepare.py", line 320, in prepare' - ' osd_fsid,' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/prepare.py", line 119, in prepare_bluestore' - ' db=db' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/util/prepare.py", line 430, in osd_mkfs_bluestore' - ' raise RuntimeError(''Command failed with exit code %s: %s'' % (returncode, '' ''.join(command)))' - 'RuntimeError: Command failed with exit code 250: /bin/ceph-osd --cluster ceph --osd-objectstore bluestore --mkfs -i 0 --monmap /var/lib/ceph/osd/ceph-0/activate.monmap --keyfile - --osd-data /var/lib/ceph/osd/ceph-0/ --osd-uuid 7046dd36-51c4-4a05-a38b-5d33236989a2 --setuser ceph --setgroup ceph' stdout: |- Running command: /usr/sbin/vgcreate -s 1G 
--force --yes ceph-f91f0e18-1d9d-4c83-a79f-693264bc8b66 /dev/nvme0n1 stdout: Physical volume "/dev/nvme0n1" successfully created. stdout: Volume group "ceph-f91f0e18-1d9d-4c83-a79f-693264bc8b66" successfully created Running command: /usr/sbin/vgcreate -s 1G --force --yes ceph-07dc46aa-ec83-4e8b-8eaa-b5fc60202907 /dev/nvme1n1 stdout: Physical volume "/dev/nvme1n1" successfully created. stdout: Volume group "ceph-07dc46aa-ec83-4e8b-8eaa-b5fc60202907" successfully created Running command: /usr/sbin/vgcreate -s 1G --force --yes ceph-f0636c1d-8985-4116-a8f6-42e021544ea7 /dev/nvme2n1 stdout: Physical volume "/dev/nvme2n1" successfully created. stdout: Volume group "ceph-f0636c1d-8985-4116-a8f6-42e021544ea7" successfully created Running command: /usr/sbin/vgcreate -s 1G --force --yes ceph-78c93297-b062-437f-a147-d2a714f03d47 /dev/nvme3n1 stdout: Physical volume "/dev/nvme3n1" successfully created. stdout: Volume group "ceph-78c93297-b062-437f-a147-d2a714f03d47" successfully created Running command: /usr/sbin/vgcreate -s 1G --force --yes ceph-5d70f4d4-2dc4-4f16-bfda-ccb774307118 /dev/nvme4n1 stdout: Physical volume "/dev/nvme4n1" successfully created. stdout: Volume group "ceph-5d70f4d4-2dc4-4f16-bfda-ccb774307118" successfully created Running command: /usr/sbin/lvcreate --yes -l 372 -n osd-data-7093b723-196d-4d03-991f-a1e284a10e72 ceph-f91f0e18-1d9d-4c83-a79f-693264bc8b66 stdout: Logical volume "osd-data-7093b723-196d-4d03-991f-a1e284a10e72" created. Running command: /usr/sbin/lvcreate --yes -l 372 -n osd-data-5689cb1c-757d-446f-9e4d-097256af1b64 ceph-f91f0e18-1d9d-4c83-a79f-693264bc8b66 stdout: Logical volume "osd-data-5689cb1c-757d-446f-9e4d-097256af1b64" created. Running command: /bin/ceph-authtool --gen-print-key Running command: /bin/ceph --cluster ceph --name client.bootstrap-osd --keyring /var/lib/ceph/bootstrap-osd/ceph.keyring -i - osd new 7046dd36-51c4-4a05-a38b-5d33236989a2 Running command: /bin/ceph-authtool --gen-print-key Running command: /bin/mount -t tmpfs tmpfs /var/lib/ceph/osd/ceph-0 Running command: /usr/sbin/restorecon /var/lib/ceph/osd/ceph-0 Running command: /bin/chown -h ceph:ceph /dev/ceph-f91f0e18-1d9d-4c83-a79f-693264bc8b66/osd-data-7093b723-196d-4d03-991f-a1e284a10e72 Running command: /bin/chown -R ceph:ceph /dev/dm-3 Running command: /bin/ln -s /dev/ceph-f91f0e18-1d9d-4c83-a79f-693264bc8b66/osd-data-7093b723-196d-4d03-991f-a1e284a10e72 /var/lib/ceph/osd/ceph-0/block Running command: /bin/ceph --cluster ceph --name client.bootstrap-osd --keyring /var/lib/ceph/bootstrap-osd/ceph.keyring mon getmap -o /var/lib/ceph/osd/ceph-0/activate.monmap stderr: got monmap epoch 1 Running command: /bin/ceph-authtool /var/lib/ceph/osd/ceph-0/keyring --create-keyring --name osd.0 --add-key AQB8DbdcM8BTGhAAxPfHMgI6egzPvInOeSBYKg== stdout: creating /var/lib/ceph/osd/ceph-0/keyring added entity osd.0 auth(key=AQB8DbdcM8BTGhAAxPfHMgI6egzPvInOeSBYKg==) Running command: /bin/chown -R ceph:ceph /var/lib/ceph/osd/ceph-0/keyring Running command: /bin/chown -R ceph:ceph /var/lib/ceph/osd/ceph-0/ Running command: /bin/ceph-osd --cluster ceph --osd-objectstore bluestore --mkfs -i 0 --monmap /var/lib/ceph/osd/ceph-0/activate.monmap --keyfile - --osd-data /var/lib/ceph/osd/ceph-0/ --osd-uuid 7046dd36-51c4-4a05-a38b-5d33236989a2 --setuser ceph --setgroup ceph stdout: /usr/include/c++/8/bits/stl_vector.h:932: std::vector<_Tp, _Alloc>::reference std::vector<_Tp, _Alloc>::operator[](std::vector<_Tp, _Alloc>::size_type) [with _Tp = long unsigned int; _Alloc = 
mempool::pool_allocator<(mempool::pool_index_t)1, long unsigned int>; std::vector<_Tp, _Alloc>::reference = long unsigned int&; std::vector<_Tp, _Alloc>::size_type = long unsigned int]: Assertion '__builtin_expect(__n < this->size(), true)' failed. stderr: 2019-04-17 11:26:53.610 7fb951b4b080 -1 bluestore(/var/lib/ceph/osd/ceph-0/) _read_fsid unparsable uuid stderr: *** Caught signal (Aborted) ** stderr: in thread 7fb951b4b080 thread_name:ceph-osd stderr: ceph version 14.2.0-142-g2f9c072 (2f9c0720b5aed4c9e25e8b050e71856df0a986ad) nautilus (stable) stderr: 1: (()+0x12d80) [0x7fb94e6dcd80] stderr: 2: (gsignal()+0x10f) [0x7fb94d3b793f] stderr: 3: (abort()+0x127) [0x7fb94d3a1c95] stderr: 4: (()+0x65ca48) [0x56316f0c6a48] stderr: 5: (BitmapAllocator::init_add_free(unsigned long, unsigned long)+0x857) [0x56316f6c4a87] stderr: 6: (BlueStore::_open_alloc()+0x193) [0x56316f56eae3] stderr: 7: (BlueStore::_open_db_and_around(bool)+0xa6) [0x56316f5905b6] stderr: 8: (BlueStore::_fsck(bool, bool)+0x587) [0x56316f5c39d7] stderr: 9: (BlueStore::mkfs()+0x141f) [0x56316f5d364f] stderr: 10: (OSD::mkfs(CephContext*, ObjectStore*, uuid_d, int)+0x1ae) [0x56316f0e6d7e] stderr: 11: (main()+0x1bd1) [0x56316efdf0c1] stderr: 12: (__libc_start_main()+0xf3) [0x7fb94d3a3813] stderr: 13: (_start()+0x2e) [0x56316f0c52fe] stderr: 2019-04-17 11:26:54.136 7fb951b4b080 -1 *** Caught signal (Aborted) ** stderr: in thread 7fb951b4b080 thread_name:ceph-osd stderr: ceph version 14.2.0-142-g2f9c072 (2f9c0720b5aed4c9e25e8b050e71856df0a986ad) nautilus (stable) stderr: 1: (()+0x12d80) [0x7fb94e6dcd80] stderr: 2: (gsignal()+0x10f) [0x7fb94d3b793f] stderr: 3: (abort()+0x127) [0x7fb94d3a1c95] stderr: 4: (()+0x65ca48) [0x56316f0c6a48] stderr: 5: (BitmapAllocator::init_add_free(unsigned long, unsigned long)+0x857) [0x56316f6c4a87] stderr: 6: (BlueStore::_open_alloc()+0x193) [0x56316f56eae3] stderr: 7: (BlueStore::_open_db_and_around(bool)+0xa6) [0x56316f5905b6] stderr: 8: (BlueStore::_fsck(bool, bool)+0x587) [0x56316f5c39d7] stderr: 9: (BlueStore::mkfs()+0x141f) [0x56316f5d364f] stderr: 10: (OSD::mkfs(CephContext*, ObjectStore*, uuid_d, int)+0x1ae) [0x56316f0e6d7e] stderr: 11: (main()+0x1bd1) [0x56316efdf0c1] stderr: 12: (__libc_start_main()+0xf3) [0x7fb94d3a3813] stderr: 13: (_start()+0x2e) [0x56316f0c52fe] stderr: NOTE: a copy of the executable, or `objdump -rdS ` is needed to interpret this. 
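The abort above comes from the ceph-osd --mkfs step that ceph-volume's lvm prepare stage invokes: _read_fsid reports an unparsable uuid and the process then aborts inside BitmapAllocator::init_add_free during BlueStore::mkfs, on ceph version 14.2.0-142-g2f9c072 (nautilus). The failing command is quoted verbatim in the RuntimeError, so it can be re-run by hand while debugging. The sketch below reuses the OSD id and uuid from this host's output (they differ per host and per run); note that --keyfile - expects the OSD secret on stdin, which ceph-volume normally supplies itself, and that ceph-volume rolls a partially created OSD back on failure, so the OSD data directory may need to be re-prepared before the command can be repeated:

    /bin/ceph-osd --cluster ceph --osd-objectstore bluestore --mkfs -i 0 \
        --monmap /var/lib/ceph/osd/ceph-0/activate.monmap --keyfile - \
        --osd-data /var/lib/ceph/osd/ceph-0/ \
        --osd-uuid 7046dd36-51c4-4a05-a38b-5d33236989a2 \
        --setuser ceph --setgroup ceph

To see the layout ceph-volume intends to create without touching the devices, the batch subcommand also has a reporting mode (assumed to be available in this nautilus build):

    ceph-volume --cluster ceph lvm batch --bluestore --report --osds-per-device 2 \
        /dev/nvme0n1 /dev/nvme1n1 /dev/nvme2n1 /dev/nvme3n1 /dev/nvme4n1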
stderr: -485> 2019-04-17 11:26:53.610 7fb951b4b080 -1 bluestore(/var/lib/ceph/osd/ceph-0/) _read_fsid unparsable uuid stderr: 0> 2019-04-17 11:26:54.136 7fb951b4b080 -1 *** Caught signal (Aborted) ** stderr: in thread 7fb951b4b080 thread_name:ceph-osd stderr: ceph version 14.2.0-142-g2f9c072 (2f9c0720b5aed4c9e25e8b050e71856df0a986ad) nautilus (stable) stderr: 1: (()+0x12d80) [0x7fb94e6dcd80] stderr: 2: (gsignal()+0x10f) [0x7fb94d3b793f] stderr: 3: (abort()+0x127) [0x7fb94d3a1c95] stderr: 4: (()+0x65ca48) [0x56316f0c6a48] stderr: 5: (BitmapAllocator::init_add_free(unsigned long, unsigned long)+0x857) [0x56316f6c4a87] stderr: 6: (BlueStore::_open_alloc()+0x193) [0x56316f56eae3] stderr: 7: (BlueStore::_open_db_and_around(bool)+0xa6) [0x56316f5905b6] stderr: 8: (BlueStore::_fsck(bool, bool)+0x587) [0x56316f5c39d7] stderr: 9: (BlueStore::mkfs()+0x141f) [0x56316f5d364f] stderr: 10: (OSD::mkfs(CephContext*, ObjectStore*, uuid_d, int)+0x1ae) [0x56316f0e6d7e] stderr: 11: (main()+0x1bd1) [0x56316efdf0c1] stderr: 12: (__libc_start_main()+0xf3) [0x7fb94d3a3813] stderr: 13: (_start()+0x2e) [0x56316f0c52fe] stderr: NOTE: a copy of the executable, or `objdump -rdS ` is needed to interpret this. stderr: -485> 2019-04-17 11:26:53.610 7fb951b4b080 -1 bluestore(/var/lib/ceph/osd/ceph-0/) _read_fsid unparsable uuid stderr: 0> 2019-04-17 11:26:54.136 7fb951b4b080 -1 *** Caught signal (Aborted) ** stderr: in thread 7fb951b4b080 thread_name:ceph-osd stderr: ceph version 14.2.0-142-g2f9c072 (2f9c0720b5aed4c9e25e8b050e71856df0a986ad) nautilus (stable) stderr: 1: (()+0x12d80) [0x7fb94e6dcd80] stderr: 2: (gsignal()+0x10f) [0x7fb94d3b793f] stderr: 3: (abort()+0x127) [0x7fb94d3a1c95] stderr: 4: (()+0x65ca48) [0x56316f0c6a48] stderr: 5: (BitmapAllocator::init_add_free(unsigned long, unsigned long)+0x857) [0x56316f6c4a87] stderr: 6: (BlueStore::_open_alloc()+0x193) [0x56316f56eae3] stderr: 7: (BlueStore::_open_db_and_around(bool)+0xa6) [0x56316f5905b6] stderr: 8: (BlueStore::_fsck(bool, bool)+0x587) [0x56316f5c39d7] stderr: 9: (BlueStore::mkfs()+0x141f) [0x56316f5d364f] stderr: 10: (OSD::mkfs(CephContext*, ObjectStore*, uuid_d, int)+0x1ae) [0x56316f0e6d7e] stderr: 11: (main()+0x1bd1) [0x56316efdf0c1] stderr: 12: (__libc_start_main()+0xf3) [0x7fb94d3a3813] stderr: 13: (_start()+0x2e) [0x56316f0c52fe] stderr: NOTE: a copy of the executable, or `objdump -rdS ` is needed to interpret this. --> Was unable to complete a new OSD, will rollback changes Running command: /bin/ceph --cluster ceph --name client.bootstrap-osd --keyring /var/lib/ceph/bootstrap-osd/ceph.keyring osd purge-new osd.0 --yes-i-really-mean-it stderr: purged osd.0 stdout_lines: fatal: [e24-h05-740xd.alias.bos.scalelab.redhat.com]: FAILED! 
=> changed=true cmd: - ceph-volume - --cluster - ceph - lvm - batch - --bluestore - --yes - --osds-per-device - '2' - /dev/nvme0n1 - /dev/nvme1n1 - /dev/nvme2n1 - /dev/nvme3n1 - /dev/nvme4n1 delta: '0:00:04.733265' end: '2019-04-17 11:26:54.836611' msg: non-zero return code rc: 1 start: '2019-04-17 11:26:50.103346' stderr: |- Traceback (most recent call last): File "/sbin/ceph-volume", line 11, in load_entry_point('ceph-volume==1.0.0', 'console_scripts', 'ceph-volume')() File "/usr/lib/python3.6/site-packages/ceph_volume/main.py", line 38, in __init__ self.main(self.argv) File "/usr/lib/python3.6/site-packages/ceph_volume/decorators.py", line 59, in newfunc return f(*a, **kw) File "/usr/lib/python3.6/site-packages/ceph_volume/main.py", line 148, in main terminal.dispatch(self.mapper, subcommand_args) File "/usr/lib/python3.6/site-packages/ceph_volume/terminal.py", line 182, in dispatch instance.main() File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/main.py", line 40, in main terminal.dispatch(self.mapper, self.argv) File "/usr/lib/python3.6/site-packages/ceph_volume/terminal.py", line 182, in dispatch instance.main() File "/usr/lib/python3.6/site-packages/ceph_volume/decorators.py", line 16, in is_root return func(*a, **kw) File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/batch.py", line 325, in main self.execute() File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/batch.py", line 288, in execute self.strategy.execute() File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/strategies/bluestore.py", line 124, in execute Create(command).main() File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/create.py", line 69, in main self.create(args) File "/usr/lib/python3.6/site-packages/ceph_volume/decorators.py", line 16, in is_root return func(*a, **kw) File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/create.py", line 26, in create prepare_step.safe_prepare(args) File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/prepare.py", line 219, in safe_prepare self.prepare() File "/usr/lib/python3.6/site-packages/ceph_volume/decorators.py", line 16, in is_root return func(*a, **kw) File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/prepare.py", line 320, in prepare osd_fsid, File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/prepare.py", line 119, in prepare_bluestore db=db File "/usr/lib/python3.6/site-packages/ceph_volume/util/prepare.py", line 430, in osd_mkfs_bluestore raise RuntimeError('Command failed with exit code %s: %s' % (returncode, ' '.join(command))) RuntimeError: Command failed with exit code 250: /bin/ceph-osd --cluster ceph --osd-objectstore bluestore --mkfs -i 1 --monmap /var/lib/ceph/osd/ceph-1/activate.monmap --keyfile - --osd-data /var/lib/ceph/osd/ceph-1/ --osd-uuid fee60f44-354c-47c1-b969-7514ea7f204b --setuser ceph --setgroup ceph stderr_lines: - 'Traceback (most recent call last):' - ' File "/sbin/ceph-volume", line 11, in ' - ' load_entry_point(''ceph-volume==1.0.0'', ''console_scripts'', ''ceph-volume'')()' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/main.py", line 38, in __init__' - ' self.main(self.argv)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/decorators.py", line 59, in newfunc' - ' return f(*a, **kw)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/main.py", line 148, in main' - ' terminal.dispatch(self.mapper, subcommand_args)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/terminal.py", line 182, in dispatch' - ' 
instance.main()' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/main.py", line 40, in main' - ' terminal.dispatch(self.mapper, self.argv)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/terminal.py", line 182, in dispatch' - ' instance.main()' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/decorators.py", line 16, in is_root' - ' return func(*a, **kw)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/batch.py", line 325, in main' - ' self.execute()' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/batch.py", line 288, in execute' - ' self.strategy.execute()' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/strategies/bluestore.py", line 124, in execute' - ' Create(command).main()' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/create.py", line 69, in main' - ' self.create(args)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/decorators.py", line 16, in is_root' - ' return func(*a, **kw)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/create.py", line 26, in create' - ' prepare_step.safe_prepare(args)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/prepare.py", line 219, in safe_prepare' - ' self.prepare()' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/decorators.py", line 16, in is_root' - ' return func(*a, **kw)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/prepare.py", line 320, in prepare' - ' osd_fsid,' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/prepare.py", line 119, in prepare_bluestore' - ' db=db' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/util/prepare.py", line 430, in osd_mkfs_bluestore' - ' raise RuntimeError(''Command failed with exit code %s: %s'' % (returncode, '' ''.join(command)))' - 'RuntimeError: Command failed with exit code 250: /bin/ceph-osd --cluster ceph --osd-objectstore bluestore --mkfs -i 1 --monmap /var/lib/ceph/osd/ceph-1/activate.monmap --keyfile - --osd-data /var/lib/ceph/osd/ceph-1/ --osd-uuid fee60f44-354c-47c1-b969-7514ea7f204b --setuser ceph --setgroup ceph' stdout: |- Running command: /usr/sbin/vgcreate -s 1G --force --yes ceph-09a0711d-771f-46fb-85e2-604ab737b3f5 /dev/nvme0n1 stdout: Physical volume "/dev/nvme0n1" successfully created. stdout: Volume group "ceph-09a0711d-771f-46fb-85e2-604ab737b3f5" successfully created Running command: /usr/sbin/vgcreate -s 1G --force --yes ceph-2f0ac98c-3a86-4fdd-b649-025c55dabaa7 /dev/nvme1n1 stdout: Physical volume "/dev/nvme1n1" successfully created. stdout: Volume group "ceph-2f0ac98c-3a86-4fdd-b649-025c55dabaa7" successfully created Running command: /usr/sbin/vgcreate -s 1G --force --yes ceph-1b3458a7-dd39-4835-b91d-468bd6410ac9 /dev/nvme2n1 stdout: Physical volume "/dev/nvme2n1" successfully created. stdout: Volume group "ceph-1b3458a7-dd39-4835-b91d-468bd6410ac9" successfully created Running command: /usr/sbin/vgcreate -s 1G --force --yes ceph-a2ecba32-fb40-48d5-a5de-d0368bc3a38e /dev/nvme3n1 stdout: Physical volume "/dev/nvme3n1" successfully created. stdout: Volume group "ceph-a2ecba32-fb40-48d5-a5de-d0368bc3a38e" successfully created Running command: /usr/sbin/vgcreate -s 1G --force --yes ceph-1b5b13d3-af17-464f-9154-84aa3a0fd821 /dev/nvme4n1 stdout: Physical volume "/dev/nvme4n1" successfully created. 
stdout: Volume group "ceph-1b5b13d3-af17-464f-9154-84aa3a0fd821" successfully created Running command: /usr/sbin/lvcreate --yes -l 372 -n osd-data-b89d8a77-7cc6-4295-ba5c-b99508c6f221 ceph-09a0711d-771f-46fb-85e2-604ab737b3f5 stdout: Logical volume "osd-data-b89d8a77-7cc6-4295-ba5c-b99508c6f221" created. Running command: /usr/sbin/lvcreate --yes -l 372 -n osd-data-add7d28f-df6e-40db-9343-b2243694577a ceph-09a0711d-771f-46fb-85e2-604ab737b3f5 stdout: Logical volume "osd-data-add7d28f-df6e-40db-9343-b2243694577a" created. Running command: /bin/ceph-authtool --gen-print-key Running command: /bin/ceph --cluster ceph --name client.bootstrap-osd --keyring /var/lib/ceph/bootstrap-osd/ceph.keyring -i - osd new fee60f44-354c-47c1-b969-7514ea7f204b Running command: /bin/ceph-authtool --gen-print-key Running command: /bin/mount -t tmpfs tmpfs /var/lib/ceph/osd/ceph-1 Running command: /usr/sbin/restorecon /var/lib/ceph/osd/ceph-1 Running command: /bin/chown -h ceph:ceph /dev/ceph-09a0711d-771f-46fb-85e2-604ab737b3f5/osd-data-b89d8a77-7cc6-4295-ba5c-b99508c6f221 Running command: /bin/chown -R ceph:ceph /dev/dm-3 Running command: /bin/ln -s /dev/ceph-09a0711d-771f-46fb-85e2-604ab737b3f5/osd-data-b89d8a77-7cc6-4295-ba5c-b99508c6f221 /var/lib/ceph/osd/ceph-1/block Running command: /bin/ceph --cluster ceph --name client.bootstrap-osd --keyring /var/lib/ceph/bootstrap-osd/ceph.keyring mon getmap -o /var/lib/ceph/osd/ceph-1/activate.monmap stderr: got monmap epoch 1 Running command: /bin/ceph-authtool /var/lib/ceph/osd/ceph-1/keyring --create-keyring --name osd.1 --add-key AQB8Dbdcpa3NHhAA0OjZtSSuVGpWC8fhnsbnOg== stdout: creating /var/lib/ceph/osd/ceph-1/keyring added entity osd.1 auth(key=AQB8Dbdcpa3NHhAA0OjZtSSuVGpWC8fhnsbnOg==) Running command: /bin/chown -R ceph:ceph /var/lib/ceph/osd/ceph-1/keyring Running command: /bin/chown -R ceph:ceph /var/lib/ceph/osd/ceph-1/ Running command: /bin/ceph-osd --cluster ceph --osd-objectstore bluestore --mkfs -i 1 --monmap /var/lib/ceph/osd/ceph-1/activate.monmap --keyfile - --osd-data /var/lib/ceph/osd/ceph-1/ --osd-uuid fee60f44-354c-47c1-b969-7514ea7f204b --setuser ceph --setgroup ceph stdout: /usr/include/c++/8/bits/stl_vector.h:932: std::vector<_Tp, _Alloc>::reference std::vector<_Tp, _Alloc>::operator[](std::vector<_Tp, _Alloc>::size_type) [with _Tp = long unsigned int; _Alloc = mempool::pool_allocator<(mempool::pool_index_t)1, long unsigned int>; std::vector<_Tp, _Alloc>::reference = long unsigned int&; std::vector<_Tp, _Alloc>::size_type = long unsigned int]: Assertion '__builtin_expect(__n < this->size(), true)' failed. 
stderr: 2019-04-17 11:26:53.712 7f51ef566080 -1 bluestore(/var/lib/ceph/osd/ceph-1/) _read_fsid unparsable uuid stderr: *** Caught signal (Aborted) ** stderr: in thread 7f51ef566080 thread_name:ceph-osd stderr: ceph version 14.2.0-142-g2f9c072 (2f9c0720b5aed4c9e25e8b050e71856df0a986ad) nautilus (stable) stderr: 1: (()+0x12d80) [0x7f51ec0f8d80] stderr: 2: (gsignal()+0x10f) [0x7f51eadd393f] stderr: 3: (abort()+0x127) [0x7f51eadbdc95] stderr: 4: (()+0x65ca48) [0x562bd3d29a48] stderr: 5: (BitmapAllocator::init_add_free(unsigned long, unsigned long)+0x857) [0x562bd4327a87] stderr: 6: (BlueStore::_open_alloc()+0x193) [0x562bd41d1ae3] stderr: 7: (BlueStore::_open_db_and_around(bool)+0xa6) [0x562bd41f35b6] stderr: 8: (BlueStore::_fsck(bool, bool)+0x587) [0x562bd42269d7] stderr: 9: (BlueStore::mkfs()+0x141f) [0x562bd423664f] stderr: 10: (OSD::mkfs(CephContext*, ObjectStore*, uuid_d, int)+0x1ae) [0x562bd3d49d7e] stderr: 11: (main()+0x1bd1) [0x562bd3c420c1] stderr: 12: (__libc_start_main()+0xf3) [0x7f51eadbf813] stderr: 13: (_start()+0x2e) [0x562bd3d282fe] stderr: 2019-04-17 11:26:54.237 7f51ef566080 -1 *** Caught signal (Aborted) ** stderr: in thread 7f51ef566080 thread_name:ceph-osd stderr: ceph version 14.2.0-142-g2f9c072 (2f9c0720b5aed4c9e25e8b050e71856df0a986ad) nautilus (stable) stderr: 1: (()+0x12d80) [0x7f51ec0f8d80] stderr: 2: (gsignal()+0x10f) [0x7f51eadd393f] stderr: 3: (abort()+0x127) [0x7f51eadbdc95] stderr: 4: (()+0x65ca48) [0x562bd3d29a48] stderr: 5: (BitmapAllocator::init_add_free(unsigned long, unsigned long)+0x857) [0x562bd4327a87] stderr: 6: (BlueStore::_open_alloc()+0x193) [0x562bd41d1ae3] stderr: 7: (BlueStore::_open_db_and_around(bool)+0xa6) [0x562bd41f35b6] stderr: 8: (BlueStore::_fsck(bool, bool)+0x587) [0x562bd42269d7] stderr: 9: (BlueStore::mkfs()+0x141f) [0x562bd423664f] stderr: 10: (OSD::mkfs(CephContext*, ObjectStore*, uuid_d, int)+0x1ae) [0x562bd3d49d7e] stderr: 11: (main()+0x1bd1) [0x562bd3c420c1] stderr: 12: (__libc_start_main()+0xf3) [0x7f51eadbf813] stderr: 13: (_start()+0x2e) [0x562bd3d282fe] stderr: NOTE: a copy of the executable, or `objdump -rdS ` is needed to interpret this. stderr: -485> 2019-04-17 11:26:53.712 7f51ef566080 -1 bluestore(/var/lib/ceph/osd/ceph-1/) _read_fsid unparsable uuid stderr: 0> 2019-04-17 11:26:54.237 7f51ef566080 -1 *** Caught signal (Aborted) ** stderr: in thread 7f51ef566080 thread_name:ceph-osd stderr: ceph version 14.2.0-142-g2f9c072 (2f9c0720b5aed4c9e25e8b050e71856df0a986ad) nautilus (stable) stderr: 1: (()+0x12d80) [0x7f51ec0f8d80] stderr: 2: (gsignal()+0x10f) [0x7f51eadd393f] stderr: 3: (abort()+0x127) [0x7f51eadbdc95] stderr: 4: (()+0x65ca48) [0x562bd3d29a48] stderr: 5: (BitmapAllocator::init_add_free(unsigned long, unsigned long)+0x857) [0x562bd4327a87] stderr: 6: (BlueStore::_open_alloc()+0x193) [0x562bd41d1ae3] stderr: 7: (BlueStore::_open_db_and_around(bool)+0xa6) [0x562bd41f35b6] stderr: 8: (BlueStore::_fsck(bool, bool)+0x587) [0x562bd42269d7] stderr: 9: (BlueStore::mkfs()+0x141f) [0x562bd423664f] stderr: 10: (OSD::mkfs(CephContext*, ObjectStore*, uuid_d, int)+0x1ae) [0x562bd3d49d7e] stderr: 11: (main()+0x1bd1) [0x562bd3c420c1] stderr: 12: (__libc_start_main()+0xf3) [0x7f51eadbf813] stderr: 13: (_start()+0x2e) [0x562bd3d282fe] stderr: NOTE: a copy of the executable, or `objdump -rdS ` is needed to interpret this. 
stderr: -485> 2019-04-17 11:26:53.712 7f51ef566080 -1 bluestore(/var/lib/ceph/osd/ceph-1/) _read_fsid unparsable uuid stderr: 0> 2019-04-17 11:26:54.237 7f51ef566080 -1 *** Caught signal (Aborted) ** stderr: in thread 7f51ef566080 thread_name:ceph-osd stderr: ceph version 14.2.0-142-g2f9c072 (2f9c0720b5aed4c9e25e8b050e71856df0a986ad) nautilus (stable) stderr: 1: (()+0x12d80) [0x7f51ec0f8d80] stderr: 2: (gsignal()+0x10f) [0x7f51eadd393f] stderr: 3: (abort()+0x127) [0x7f51eadbdc95] stderr: 4: (()+0x65ca48) [0x562bd3d29a48] stderr: 5: (BitmapAllocator::init_add_free(unsigned long, unsigned long)+0x857) [0x562bd4327a87] stderr: 6: (BlueStore::_open_alloc()+0x193) [0x562bd41d1ae3] stderr: 7: (BlueStore::_open_db_and_around(bool)+0xa6) [0x562bd41f35b6] stderr: 8: (BlueStore::_fsck(bool, bool)+0x587) [0x562bd42269d7] stderr: 9: (BlueStore::mkfs()+0x141f) [0x562bd423664f] stderr: 10: (OSD::mkfs(CephContext*, ObjectStore*, uuid_d, int)+0x1ae) [0x562bd3d49d7e] stderr: 11: (main()+0x1bd1) [0x562bd3c420c1] stderr: 12: (__libc_start_main()+0xf3) [0x7f51eadbf813] stderr: 13: (_start()+0x2e) [0x562bd3d282fe] stderr: NOTE: a copy of the executable, or `objdump -rdS ` is needed to interpret this. --> Was unable to complete a new OSD, will rollback changes Running command: /bin/ceph --cluster ceph --name client.bootstrap-osd --keyring /var/lib/ceph/bootstrap-osd/ceph.keyring osd purge-new osd.1 --yes-i-really-mean-it stderr: purged osd.1 stdout_lines: fatal: [e24-h07-740xd.alias.bos.scalelab.redhat.com]: FAILED! => changed=true cmd: - ceph-volume - --cluster - ceph - lvm - batch - --bluestore - --yes - --osds-per-device - '2' - /dev/nvme0n1 - /dev/nvme1n1 - /dev/nvme2n1 - /dev/nvme3n1 - /dev/nvme4n1 delta: '0:00:04.715823' end: '2019-04-17 11:26:54.844182' msg: non-zero return code rc: 1 start: '2019-04-17 11:26:50.128359' stderr: |- Traceback (most recent call last): File "/sbin/ceph-volume", line 11, in load_entry_point('ceph-volume==1.0.0', 'console_scripts', 'ceph-volume')() File "/usr/lib/python3.6/site-packages/ceph_volume/main.py", line 38, in __init__ self.main(self.argv) File "/usr/lib/python3.6/site-packages/ceph_volume/decorators.py", line 59, in newfunc return f(*a, **kw) File "/usr/lib/python3.6/site-packages/ceph_volume/main.py", line 148, in main terminal.dispatch(self.mapper, subcommand_args) File "/usr/lib/python3.6/site-packages/ceph_volume/terminal.py", line 182, in dispatch instance.main() File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/main.py", line 40, in main terminal.dispatch(self.mapper, self.argv) File "/usr/lib/python3.6/site-packages/ceph_volume/terminal.py", line 182, in dispatch instance.main() File "/usr/lib/python3.6/site-packages/ceph_volume/decorators.py", line 16, in is_root return func(*a, **kw) File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/batch.py", line 325, in main self.execute() File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/batch.py", line 288, in execute self.strategy.execute() File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/strategies/bluestore.py", line 124, in execute Create(command).main() File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/create.py", line 69, in main self.create(args) File "/usr/lib/python3.6/site-packages/ceph_volume/decorators.py", line 16, in is_root return func(*a, **kw) File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/create.py", line 26, in create prepare_step.safe_prepare(args) File 
"/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/prepare.py", line 219, in safe_prepare self.prepare() File "/usr/lib/python3.6/site-packages/ceph_volume/decorators.py", line 16, in is_root return func(*a, **kw) File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/prepare.py", line 320, in prepare osd_fsid, File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/prepare.py", line 119, in prepare_bluestore db=db File "/usr/lib/python3.6/site-packages/ceph_volume/util/prepare.py", line 430, in osd_mkfs_bluestore raise RuntimeError('Command failed with exit code %s: %s' % (returncode, ' '.join(command))) RuntimeError: Command failed with exit code 250: /bin/ceph-osd --cluster ceph --osd-objectstore bluestore --mkfs -i 2 --monmap /var/lib/ceph/osd/ceph-2/activate.monmap --keyfile - --osd-data /var/lib/ceph/osd/ceph-2/ --osd-uuid 464753ad-e2c2-4237-8c5b-cd8c6b8e3961 --setuser ceph --setgroup ceph stderr_lines: - 'Traceback (most recent call last):' - ' File "/sbin/ceph-volume", line 11, in ' - ' load_entry_point(''ceph-volume==1.0.0'', ''console_scripts'', ''ceph-volume'')()' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/main.py", line 38, in __init__' - ' self.main(self.argv)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/decorators.py", line 59, in newfunc' - ' return f(*a, **kw)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/main.py", line 148, in main' - ' terminal.dispatch(self.mapper, subcommand_args)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/terminal.py", line 182, in dispatch' - ' instance.main()' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/main.py", line 40, in main' - ' terminal.dispatch(self.mapper, self.argv)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/terminal.py", line 182, in dispatch' - ' instance.main()' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/decorators.py", line 16, in is_root' - ' return func(*a, **kw)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/batch.py", line 325, in main' - ' self.execute()' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/batch.py", line 288, in execute' - ' self.strategy.execute()' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/strategies/bluestore.py", line 124, in execute' - ' Create(command).main()' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/create.py", line 69, in main' - ' self.create(args)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/decorators.py", line 16, in is_root' - ' return func(*a, **kw)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/create.py", line 26, in create' - ' prepare_step.safe_prepare(args)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/prepare.py", line 219, in safe_prepare' - ' self.prepare()' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/decorators.py", line 16, in is_root' - ' return func(*a, **kw)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/prepare.py", line 320, in prepare' - ' osd_fsid,' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/prepare.py", line 119, in prepare_bluestore' - ' db=db' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/util/prepare.py", line 430, in osd_mkfs_bluestore' - ' raise RuntimeError(''Command failed with exit code %s: %s'' % (returncode, '' ''.join(command)))' - 'RuntimeError: Command failed with exit code 250: /bin/ceph-osd --cluster ceph --osd-objectstore bluestore --mkfs -i 2 --monmap 
/var/lib/ceph/osd/ceph-2/activate.monmap --keyfile - --osd-data /var/lib/ceph/osd/ceph-2/ --osd-uuid 464753ad-e2c2-4237-8c5b-cd8c6b8e3961 --setuser ceph --setgroup ceph' stdout: |- Running command: /usr/sbin/vgcreate -s 1G --force --yes ceph-3a1a279e-a499-40f4-82a4-fd0dadddac34 /dev/nvme0n1 stdout: Physical volume "/dev/nvme0n1" successfully created. stdout: Volume group "ceph-3a1a279e-a499-40f4-82a4-fd0dadddac34" successfully created Running command: /usr/sbin/vgcreate -s 1G --force --yes ceph-c0803e93-bfae-48f2-b529-7ca6fb4bec3c /dev/nvme1n1 stdout: Physical volume "/dev/nvme1n1" successfully created. stdout: Volume group "ceph-c0803e93-bfae-48f2-b529-7ca6fb4bec3c" successfully created Running command: /usr/sbin/vgcreate -s 1G --force --yes ceph-7286a18a-580d-45b7-8e60-98318c31f6ff /dev/nvme2n1 stdout: Physical volume "/dev/nvme2n1" successfully created. stdout: Volume group "ceph-7286a18a-580d-45b7-8e60-98318c31f6ff" successfully created Running command: /usr/sbin/vgcreate -s 1G --force --yes ceph-1eb518fd-2e12-4606-b56b-545f8391de71 /dev/nvme3n1 stdout: Physical volume "/dev/nvme3n1" successfully created. stdout: Volume group "ceph-1eb518fd-2e12-4606-b56b-545f8391de71" successfully created Running command: /usr/sbin/vgcreate -s 1G --force --yes ceph-e4f76a00-e549-43c8-821c-11b695e32c6b /dev/nvme4n1 stdout: Physical volume "/dev/nvme4n1" successfully created. stdout: Volume group "ceph-e4f76a00-e549-43c8-821c-11b695e32c6b" successfully created Running command: /usr/sbin/lvcreate --yes -l 372 -n osd-data-0287144b-e6cb-4051-8e98-5f5d951ec82a ceph-3a1a279e-a499-40f4-82a4-fd0dadddac34 stdout: Logical volume "osd-data-0287144b-e6cb-4051-8e98-5f5d951ec82a" created. Running command: /usr/sbin/lvcreate --yes -l 372 -n osd-data-a9b9e615-54d1-45e0-95a5-160e95e632e4 ceph-3a1a279e-a499-40f4-82a4-fd0dadddac34 stdout: Logical volume "osd-data-a9b9e615-54d1-45e0-95a5-160e95e632e4" created. 
Running command: /bin/ceph-authtool --gen-print-key Running command: /bin/ceph --cluster ceph --name client.bootstrap-osd --keyring /var/lib/ceph/bootstrap-osd/ceph.keyring -i - osd new 464753ad-e2c2-4237-8c5b-cd8c6b8e3961 Running command: /bin/ceph-authtool --gen-print-key Running command: /bin/mount -t tmpfs tmpfs /var/lib/ceph/osd/ceph-2 Running command: /usr/sbin/restorecon /var/lib/ceph/osd/ceph-2 Running command: /bin/chown -h ceph:ceph /dev/ceph-3a1a279e-a499-40f4-82a4-fd0dadddac34/osd-data-0287144b-e6cb-4051-8e98-5f5d951ec82a Running command: /bin/chown -R ceph:ceph /dev/dm-3 Running command: /bin/ln -s /dev/ceph-3a1a279e-a499-40f4-82a4-fd0dadddac34/osd-data-0287144b-e6cb-4051-8e98-5f5d951ec82a /var/lib/ceph/osd/ceph-2/block Running command: /bin/ceph --cluster ceph --name client.bootstrap-osd --keyring /var/lib/ceph/bootstrap-osd/ceph.keyring mon getmap -o /var/lib/ceph/osd/ceph-2/activate.monmap stderr: got monmap epoch 1 Running command: /bin/ceph-authtool /var/lib/ceph/osd/ceph-2/keyring --create-keyring --name osd.2 --add-key AQB8Dbdc1xaKHxAAksdJFccpjMn5F8sMB1MqJg== stdout: creating /var/lib/ceph/osd/ceph-2/keyring added entity osd.2 auth(key=AQB8Dbdc1xaKHxAAksdJFccpjMn5F8sMB1MqJg==) Running command: /bin/chown -R ceph:ceph /var/lib/ceph/osd/ceph-2/keyring Running command: /bin/chown -R ceph:ceph /var/lib/ceph/osd/ceph-2/ Running command: /bin/ceph-osd --cluster ceph --osd-objectstore bluestore --mkfs -i 2 --monmap /var/lib/ceph/osd/ceph-2/activate.monmap --keyfile - --osd-data /var/lib/ceph/osd/ceph-2/ --osd-uuid 464753ad-e2c2-4237-8c5b-cd8c6b8e3961 --setuser ceph --setgroup ceph stdout: /usr/include/c++/8/bits/stl_vector.h:932: std::vector<_Tp, _Alloc>::reference std::vector<_Tp, _Alloc>::operator[](std::vector<_Tp, _Alloc>::size_type) [with _Tp = long unsigned int; _Alloc = mempool::pool_allocator<(mempool::pool_index_t)1, long unsigned int>; std::vector<_Tp, _Alloc>::reference = long unsigned int&; std::vector<_Tp, _Alloc>::size_type = long unsigned int]: Assertion '__builtin_expect(__n < this->size(), true)' failed. 
stderr: 2019-04-17 11:26:53.729 7feed02fe080 -1 bluestore(/var/lib/ceph/osd/ceph-2/) _read_fsid unparsable uuid stderr: *** Caught signal (Aborted) ** stderr: in thread 7feed02fe080 thread_name:ceph-osd stderr: ceph version 14.2.0-142-g2f9c072 (2f9c0720b5aed4c9e25e8b050e71856df0a986ad) nautilus (stable) stderr: 1: (()+0x12d80) [0x7feecce8fd80] stderr: 2: (gsignal()+0x10f) [0x7feecbb6a93f] stderr: 3: (abort()+0x127) [0x7feecbb54c95] stderr: 4: (()+0x65ca48) [0x56043354aa48] stderr: 5: (BitmapAllocator::init_add_free(unsigned long, unsigned long)+0x857) [0x560433b48a87] stderr: 6: (BlueStore::_open_alloc()+0x193) [0x5604339f2ae3] stderr: 7: (BlueStore::_open_db_and_around(bool)+0xa6) [0x560433a145b6] stderr: 8: (BlueStore::_fsck(bool, bool)+0x587) [0x560433a479d7] stderr: 9: (BlueStore::mkfs()+0x141f) [0x560433a5764f] stderr: 10: (OSD::mkfs(CephContext*, ObjectStore*, uuid_d, int)+0x1ae) [0x56043356ad7e] stderr: 11: (main()+0x1bd1) [0x5604334630c1] stderr: 12: (__libc_start_main()+0xf3) [0x7feecbb56813] stderr: 13: (_start()+0x2e) [0x5604335492fe] stderr: 2019-04-17 11:26:54.251 7feed02fe080 -1 *** Caught signal (Aborted) ** stderr: in thread 7feed02fe080 thread_name:ceph-osd stderr: ceph version 14.2.0-142-g2f9c072 (2f9c0720b5aed4c9e25e8b050e71856df0a986ad) nautilus (stable) stderr: 1: (()+0x12d80) [0x7feecce8fd80] stderr: 2: (gsignal()+0x10f) [0x7feecbb6a93f] stderr: 3: (abort()+0x127) [0x7feecbb54c95] stderr: 4: (()+0x65ca48) [0x56043354aa48] stderr: 5: (BitmapAllocator::init_add_free(unsigned long, unsigned long)+0x857) [0x560433b48a87] stderr: 6: (BlueStore::_open_alloc()+0x193) [0x5604339f2ae3] stderr: 7: (BlueStore::_open_db_and_around(bool)+0xa6) [0x560433a145b6] stderr: 8: (BlueStore::_fsck(bool, bool)+0x587) [0x560433a479d7] stderr: 9: (BlueStore::mkfs()+0x141f) [0x560433a5764f] stderr: 10: (OSD::mkfs(CephContext*, ObjectStore*, uuid_d, int)+0x1ae) [0x56043356ad7e] stderr: 11: (main()+0x1bd1) [0x5604334630c1] stderr: 12: (__libc_start_main()+0xf3) [0x7feecbb56813] stderr: 13: (_start()+0x2e) [0x5604335492fe] stderr: NOTE: a copy of the executable, or `objdump -rdS ` is needed to interpret this. stderr: -485> 2019-04-17 11:26:53.729 7feed02fe080 -1 bluestore(/var/lib/ceph/osd/ceph-2/) _read_fsid unparsable uuid stderr: 0> 2019-04-17 11:26:54.251 7feed02fe080 -1 *** Caught signal (Aborted) ** stderr: in thread 7feed02fe080 thread_name:ceph-osd stderr: ceph version 14.2.0-142-g2f9c072 (2f9c0720b5aed4c9e25e8b050e71856df0a986ad) nautilus (stable) stderr: 1: (()+0x12d80) [0x7feecce8fd80] stderr: 2: (gsignal()+0x10f) [0x7feecbb6a93f] stderr: 3: (abort()+0x127) [0x7feecbb54c95] stderr: 4: (()+0x65ca48) [0x56043354aa48] stderr: 5: (BitmapAllocator::init_add_free(unsigned long, unsigned long)+0x857) [0x560433b48a87] stderr: 6: (BlueStore::_open_alloc()+0x193) [0x5604339f2ae3] stderr: 7: (BlueStore::_open_db_and_around(bool)+0xa6) [0x560433a145b6] stderr: 8: (BlueStore::_fsck(bool, bool)+0x587) [0x560433a479d7] stderr: 9: (BlueStore::mkfs()+0x141f) [0x560433a5764f] stderr: 10: (OSD::mkfs(CephContext*, ObjectStore*, uuid_d, int)+0x1ae) [0x56043356ad7e] stderr: 11: (main()+0x1bd1) [0x5604334630c1] stderr: 12: (__libc_start_main()+0xf3) [0x7feecbb56813] stderr: 13: (_start()+0x2e) [0x5604335492fe] stderr: NOTE: a copy of the executable, or `objdump -rdS ` is needed to interpret this. 
stderr: -485> 2019-04-17 11:26:53.729 7feed02fe080 -1 bluestore(/var/lib/ceph/osd/ceph-2/) _read_fsid unparsable uuid stderr: 0> 2019-04-17 11:26:54.251 7feed02fe080 -1 *** Caught signal (Aborted) ** stderr: in thread 7feed02fe080 thread_name:ceph-osd stderr: ceph version 14.2.0-142-g2f9c072 (2f9c0720b5aed4c9e25e8b050e71856df0a986ad) nautilus (stable) stderr: 1: (()+0x12d80) [0x7feecce8fd80] stderr: 2: (gsignal()+0x10f) [0x7feecbb6a93f] stderr: 3: (abort()+0x127) [0x7feecbb54c95] stderr: 4: (()+0x65ca48) [0x56043354aa48] stderr: 5: (BitmapAllocator::init_add_free(unsigned long, unsigned long)+0x857) [0x560433b48a87] stderr: 6: (BlueStore::_open_alloc()+0x193) [0x5604339f2ae3] stderr: 7: (BlueStore::_open_db_and_around(bool)+0xa6) [0x560433a145b6] stderr: 8: (BlueStore::_fsck(bool, bool)+0x587) [0x560433a479d7] stderr: 9: (BlueStore::mkfs()+0x141f) [0x560433a5764f] stderr: 10: (OSD::mkfs(CephContext*, ObjectStore*, uuid_d, int)+0x1ae) [0x56043356ad7e] stderr: 11: (main()+0x1bd1) [0x5604334630c1] stderr: 12: (__libc_start_main()+0xf3) [0x7feecbb56813] stderr: 13: (_start()+0x2e) [0x5604335492fe] stderr: NOTE: a copy of the executable, or `objdump -rdS ` is needed to interpret this. --> Was unable to complete a new OSD, will rollback changes Running command: /bin/ceph --cluster ceph --name client.bootstrap-osd --keyring /var/lib/ceph/bootstrap-osd/ceph.keyring osd purge-new osd.2 --yes-i-really-mean-it stderr: purged osd.2 stdout_lines: fatal: [e23-h07-740xd.alias.bos.scalelab.redhat.com]: FAILED! => changed=true cmd: - ceph-volume - --cluster - ceph - lvm - batch - --bluestore - --yes - --osds-per-device - '2' - /dev/nvme0n1 - /dev/nvme1n1 - /dev/nvme2n1 - /dev/nvme3n1 - /dev/nvme4n1 delta: '0:00:04.776752' end: '2019-04-17 11:26:54.932465' msg: non-zero return code rc: 1 start: '2019-04-17 11:26:50.155713' stderr: |- Traceback (most recent call last): File "/sbin/ceph-volume", line 11, in load_entry_point('ceph-volume==1.0.0', 'console_scripts', 'ceph-volume')() File "/usr/lib/python3.6/site-packages/ceph_volume/main.py", line 38, in __init__ self.main(self.argv) File "/usr/lib/python3.6/site-packages/ceph_volume/decorators.py", line 59, in newfunc return f(*a, **kw) File "/usr/lib/python3.6/site-packages/ceph_volume/main.py", line 148, in main terminal.dispatch(self.mapper, subcommand_args) File "/usr/lib/python3.6/site-packages/ceph_volume/terminal.py", line 182, in dispatch instance.main() File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/main.py", line 40, in main terminal.dispatch(self.mapper, self.argv) File "/usr/lib/python3.6/site-packages/ceph_volume/terminal.py", line 182, in dispatch instance.main() File "/usr/lib/python3.6/site-packages/ceph_volume/decorators.py", line 16, in is_root return func(*a, **kw) File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/batch.py", line 325, in main self.execute() File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/batch.py", line 288, in execute self.strategy.execute() File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/strategies/bluestore.py", line 124, in execute Create(command).main() File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/create.py", line 69, in main self.create(args) File "/usr/lib/python3.6/site-packages/ceph_volume/decorators.py", line 16, in is_root return func(*a, **kw) File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/create.py", line 26, in create prepare_step.safe_prepare(args) File 
"/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/prepare.py", line 219, in safe_prepare self.prepare() File "/usr/lib/python3.6/site-packages/ceph_volume/decorators.py", line 16, in is_root return func(*a, **kw) File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/prepare.py", line 320, in prepare osd_fsid, File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/prepare.py", line 119, in prepare_bluestore db=db File "/usr/lib/python3.6/site-packages/ceph_volume/util/prepare.py", line 430, in osd_mkfs_bluestore raise RuntimeError('Command failed with exit code %s: %s' % (returncode, ' '.join(command))) RuntimeError: Command failed with exit code 250: /bin/ceph-osd --cluster ceph --osd-objectstore bluestore --mkfs -i 3 --monmap /var/lib/ceph/osd/ceph-3/activate.monmap --keyfile - --osd-data /var/lib/ceph/osd/ceph-3/ --osd-uuid f8639821-a1ba-4a47-9176-d5c22605eabf --setuser ceph --setgroup ceph stderr_lines: - 'Traceback (most recent call last):' - ' File "/sbin/ceph-volume", line 11, in ' - ' load_entry_point(''ceph-volume==1.0.0'', ''console_scripts'', ''ceph-volume'')()' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/main.py", line 38, in __init__' - ' self.main(self.argv)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/decorators.py", line 59, in newfunc' - ' return f(*a, **kw)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/main.py", line 148, in main' - ' terminal.dispatch(self.mapper, subcommand_args)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/terminal.py", line 182, in dispatch' - ' instance.main()' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/main.py", line 40, in main' - ' terminal.dispatch(self.mapper, self.argv)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/terminal.py", line 182, in dispatch' - ' instance.main()' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/decorators.py", line 16, in is_root' - ' return func(*a, **kw)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/batch.py", line 325, in main' - ' self.execute()' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/batch.py", line 288, in execute' - ' self.strategy.execute()' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/strategies/bluestore.py", line 124, in execute' - ' Create(command).main()' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/create.py", line 69, in main' - ' self.create(args)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/decorators.py", line 16, in is_root' - ' return func(*a, **kw)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/create.py", line 26, in create' - ' prepare_step.safe_prepare(args)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/prepare.py", line 219, in safe_prepare' - ' self.prepare()' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/decorators.py", line 16, in is_root' - ' return func(*a, **kw)' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/prepare.py", line 320, in prepare' - ' osd_fsid,' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/devices/lvm/prepare.py", line 119, in prepare_bluestore' - ' db=db' - ' File "/usr/lib/python3.6/site-packages/ceph_volume/util/prepare.py", line 430, in osd_mkfs_bluestore' - ' raise RuntimeError(''Command failed with exit code %s: %s'' % (returncode, '' ''.join(command)))' - 'RuntimeError: Command failed with exit code 250: /bin/ceph-osd --cluster ceph --osd-objectstore bluestore --mkfs -i 3 --monmap 
/var/lib/ceph/osd/ceph-3/activate.monmap --keyfile - --osd-data /var/lib/ceph/osd/ceph-3/ --osd-uuid f8639821-a1ba-4a47-9176-d5c22605eabf --setuser ceph --setgroup ceph' stdout: |- Running command: /usr/sbin/vgcreate -s 1G --force --yes ceph-73ef037d-8a98-4ca5-8464-732045d2dd55 /dev/nvme0n1 stdout: Physical volume "/dev/nvme0n1" successfully created. stdout: Volume group "ceph-73ef037d-8a98-4ca5-8464-732045d2dd55" successfully created Running command: /usr/sbin/vgcreate -s 1G --force --yes ceph-88b1e7db-8d35-4665-b0d6-52f6633f8f6f /dev/nvme1n1 stdout: Physical volume "/dev/nvme1n1" successfully created. stdout: Volume group "ceph-88b1e7db-8d35-4665-b0d6-52f6633f8f6f" successfully created Running command: /usr/sbin/vgcreate -s 1G --force --yes ceph-0ed1484b-a110-4fe8-afff-d57f0a4c00b8 /dev/nvme2n1 stdout: Physical volume "/dev/nvme2n1" successfully created. stdout: Volume group "ceph-0ed1484b-a110-4fe8-afff-d57f0a4c00b8" successfully created Running command: /usr/sbin/vgcreate -s 1G --force --yes ceph-5b09220b-32f0-460c-87ea-78eb6e4d1df3 /dev/nvme3n1 stdout: Physical volume "/dev/nvme3n1" successfully created. stdout: Volume group "ceph-5b09220b-32f0-460c-87ea-78eb6e4d1df3" successfully created Running command: /usr/sbin/vgcreate -s 1G --force --yes ceph-bfe4fe33-8965-422b-9da3-3485d8932ffc /dev/nvme4n1 stdout: Physical volume "/dev/nvme4n1" successfully created. stdout: Volume group "ceph-bfe4fe33-8965-422b-9da3-3485d8932ffc" successfully created Running command: /usr/sbin/lvcreate --yes -l 372 -n osd-data-f524d080-f84b-4192-a0a1-8d3e6e1bf318 ceph-73ef037d-8a98-4ca5-8464-732045d2dd55 stdout: Logical volume "osd-data-f524d080-f84b-4192-a0a1-8d3e6e1bf318" created. Running command: /usr/sbin/lvcreate --yes -l 372 -n osd-data-ff72823f-8af7-4ace-960e-516077d08b52 ceph-73ef037d-8a98-4ca5-8464-732045d2dd55 stdout: Logical volume "osd-data-ff72823f-8af7-4ace-960e-516077d08b52" created. 
Running command: /bin/ceph-authtool --gen-print-key Running command: /bin/ceph --cluster ceph --name client.bootstrap-osd --keyring /var/lib/ceph/bootstrap-osd/ceph.keyring -i - osd new f8639821-a1ba-4a47-9176-d5c22605eabf Running command: /bin/ceph-authtool --gen-print-key Running command: /bin/mount -t tmpfs tmpfs /var/lib/ceph/osd/ceph-3 Running command: /usr/sbin/restorecon /var/lib/ceph/osd/ceph-3 Running command: /bin/chown -h ceph:ceph /dev/ceph-73ef037d-8a98-4ca5-8464-732045d2dd55/osd-data-f524d080-f84b-4192-a0a1-8d3e6e1bf318 Running command: /bin/chown -R ceph:ceph /dev/dm-3 Running command: /bin/ln -s /dev/ceph-73ef037d-8a98-4ca5-8464-732045d2dd55/osd-data-f524d080-f84b-4192-a0a1-8d3e6e1bf318 /var/lib/ceph/osd/ceph-3/block Running command: /bin/ceph --cluster ceph --name client.bootstrap-osd --keyring /var/lib/ceph/bootstrap-osd/ceph.keyring mon getmap -o /var/lib/ceph/osd/ceph-3/activate.monmap stderr: got monmap epoch 1 Running command: /bin/ceph-authtool /var/lib/ceph/osd/ceph-3/keyring --create-keyring --name osd.3 --add-key AQB8DbdcRgBoIhAAC03H8pE0BmtIXT+dV1KKvQ== stdout: creating /var/lib/ceph/osd/ceph-3/keyring added entity osd.3 auth(key=AQB8DbdcRgBoIhAAC03H8pE0BmtIXT+dV1KKvQ==) Running command: /bin/chown -R ceph:ceph /var/lib/ceph/osd/ceph-3/keyring Running command: /bin/chown -R ceph:ceph /var/lib/ceph/osd/ceph-3/ Running command: /bin/ceph-osd --cluster ceph --osd-objectstore bluestore --mkfs -i 3 --monmap /var/lib/ceph/osd/ceph-3/activate.monmap --keyfile - --osd-data /var/lib/ceph/osd/ceph-3/ --osd-uuid f8639821-a1ba-4a47-9176-d5c22605eabf --setuser ceph --setgroup ceph stdout: /usr/include/c++/8/bits/stl_vector.h:932: std::vector<_Tp, _Alloc>::reference std::vector<_Tp, _Alloc>::operator[](std::vector<_Tp, _Alloc>::size_type) [with _Tp = long unsigned int; _Alloc = mempool::pool_allocator<(mempool::pool_index_t)1, long unsigned int>; std::vector<_Tp, _Alloc>::reference = long unsigned int&; std::vector<_Tp, _Alloc>::size_type = long unsigned int]: Assertion '__builtin_expect(__n < this->size(), true)' failed. 
stderr: 2019-04-17 11:26:53.813 7f54cbe2c080 -1 bluestore(/var/lib/ceph/osd/ceph-3/) _read_fsid unparsable uuid stderr: *** Caught signal (Aborted) ** stderr: in thread 7f54cbe2c080 thread_name:ceph-osd stderr: ceph version 14.2.0-142-g2f9c072 (2f9c0720b5aed4c9e25e8b050e71856df0a986ad) nautilus (stable) stderr: 1: (()+0x12d80) [0x7f54c89bdd80] stderr: 2: (gsignal()+0x10f) [0x7f54c769893f] stderr: 3: (abort()+0x127) [0x7f54c7682c95] stderr: 4: (()+0x65ca48) [0x55f66ee8fa48] stderr: 5: (BitmapAllocator::init_add_free(unsigned long, unsigned long)+0x857) [0x55f66f48da87] stderr: 6: (BlueStore::_open_alloc()+0x193) [0x55f66f337ae3] stderr: 7: (BlueStore::_open_db_and_around(bool)+0xa6) [0x55f66f3595b6] stderr: 8: (BlueStore::_fsck(bool, bool)+0x587) [0x55f66f38c9d7] stderr: 9: (BlueStore::mkfs()+0x141f) [0x55f66f39c64f] stderr: 10: (OSD::mkfs(CephContext*, ObjectStore*, uuid_d, int)+0x1ae) [0x55f66eeafd7e] stderr: 11: (main()+0x1bd1) [0x55f66eda80c1] stderr: 12: (__libc_start_main()+0xf3) [0x7f54c7684813] stderr: 13: (_start()+0x2e) [0x55f66ee8e2fe] stderr: 2019-04-17 11:26:54.335 7f54cbe2c080 -1 *** Caught signal (Aborted) ** stderr: in thread 7f54cbe2c080 thread_name:ceph-osd stderr: ceph version 14.2.0-142-g2f9c072 (2f9c0720b5aed4c9e25e8b050e71856df0a986ad) nautilus (stable) stderr: 1: (()+0x12d80) [0x7f54c89bdd80] stderr: 2: (gsignal()+0x10f) [0x7f54c769893f] stderr: 3: (abort()+0x127) [0x7f54c7682c95] stderr: 4: (()+0x65ca48) [0x55f66ee8fa48] stderr: 5: (BitmapAllocator::init_add_free(unsigned long, unsigned long)+0x857) [0x55f66f48da87] stderr: 6: (BlueStore::_open_alloc()+0x193) [0x55f66f337ae3] stderr: 7: (BlueStore::_open_db_and_around(bool)+0xa6) [0x55f66f3595b6] stderr: 8: (BlueStore::_fsck(bool, bool)+0x587) [0x55f66f38c9d7] stderr: 9: (BlueStore::mkfs()+0x141f) [0x55f66f39c64f] stderr: 10: (OSD::mkfs(CephContext*, ObjectStore*, uuid_d, int)+0x1ae) [0x55f66eeafd7e] stderr: 11: (main()+0x1bd1) [0x55f66eda80c1] stderr: 12: (__libc_start_main()+0xf3) [0x7f54c7684813] stderr: 13: (_start()+0x2e) [0x55f66ee8e2fe] stderr: NOTE: a copy of the executable, or `objdump -rdS ` is needed to interpret this. stderr: -485> 2019-04-17 11:26:53.813 7f54cbe2c080 -1 bluestore(/var/lib/ceph/osd/ceph-3/) _read_fsid unparsable uuid stderr: 0> 2019-04-17 11:26:54.335 7f54cbe2c080 -1 *** Caught signal (Aborted) ** stderr: in thread 7f54cbe2c080 thread_name:ceph-osd stderr: ceph version 14.2.0-142-g2f9c072 (2f9c0720b5aed4c9e25e8b050e71856df0a986ad) nautilus (stable) stderr: 1: (()+0x12d80) [0x7f54c89bdd80] stderr: 2: (gsignal()+0x10f) [0x7f54c769893f] stderr: 3: (abort()+0x127) [0x7f54c7682c95] stderr: 4: (()+0x65ca48) [0x55f66ee8fa48] stderr: 5: (BitmapAllocator::init_add_free(unsigned long, unsigned long)+0x857) [0x55f66f48da87] stderr: 6: (BlueStore::_open_alloc()+0x193) [0x55f66f337ae3] stderr: 7: (BlueStore::_open_db_and_around(bool)+0xa6) [0x55f66f3595b6] stderr: 8: (BlueStore::_fsck(bool, bool)+0x587) [0x55f66f38c9d7] stderr: 9: (BlueStore::mkfs()+0x141f) [0x55f66f39c64f] stderr: 10: (OSD::mkfs(CephContext*, ObjectStore*, uuid_d, int)+0x1ae) [0x55f66eeafd7e] stderr: 11: (main()+0x1bd1) [0x55f66eda80c1] stderr: 12: (__libc_start_main()+0xf3) [0x7f54c7684813] stderr: 13: (_start()+0x2e) [0x55f66ee8e2fe] stderr: NOTE: a copy of the executable, or `objdump -rdS ` is needed to interpret this. 
stderr: -485> 2019-04-17 11:26:53.813 7f54cbe2c080 -1 bluestore(/var/lib/ceph/osd/ceph-3/) _read_fsid unparsable uuid
stderr: 0> 2019-04-17 11:26:54.335 7f54cbe2c080 -1 *** Caught signal (Aborted) **
stderr: in thread 7f54cbe2c080 thread_name:ceph-osd
stderr: ceph version 14.2.0-142-g2f9c072 (2f9c0720b5aed4c9e25e8b050e71856df0a986ad) nautilus (stable)
stderr: 1: (()+0x12d80) [0x7f54c89bdd80]
stderr: 2: (gsignal()+0x10f) [0x7f54c769893f]
stderr: 3: (abort()+0x127) [0x7f54c7682c95]
stderr: 4: (()+0x65ca48) [0x55f66ee8fa48]
stderr: 5: (BitmapAllocator::init_add_free(unsigned long, unsigned long)+0x857) [0x55f66f48da87]
stderr: 6: (BlueStore::_open_alloc()+0x193) [0x55f66f337ae3]
stderr: 7: (BlueStore::_open_db_and_around(bool)+0xa6) [0x55f66f3595b6]
stderr: 8: (BlueStore::_fsck(bool, bool)+0x587) [0x55f66f38c9d7]
stderr: 9: (BlueStore::mkfs()+0x141f) [0x55f66f39c64f]
stderr: 10: (OSD::mkfs(CephContext*, ObjectStore*, uuid_d, int)+0x1ae) [0x55f66eeafd7e]
stderr: 11: (main()+0x1bd1) [0x55f66eda80c1]
stderr: 12: (__libc_start_main()+0xf3) [0x7f54c7684813]
stderr: 13: (_start()+0x2e) [0x55f66ee8e2fe]
stderr: NOTE: a copy of the executable, or `objdump -rdS ` is needed to interpret this.
--> Was unable to complete a new OSD, will rollback changes
Running command: /bin/ceph --cluster ceph --name client.bootstrap-osd --keyring /var/lib/ceph/bootstrap-osd/ceph.keyring osd purge-new osd.3 --yes-i-really-mean-it
stderr: purged osd.3
stdout_lines:
NO MORE HOSTS LEFT *************************************************************************************
PLAY RECAP **********************************************************************************************
e23-h05-740xd.alias.bos.scalelab.redhat.com : ok=87 changed=11 unreachable=0 failed=1
e23-h07-740xd.alias.bos.scalelab.redhat.com : ok=85 changed=11 unreachable=0 failed=1
e24-h05-740xd.alias.bos.scalelab.redhat.com : ok=85 changed=10 unreachable=0 failed=1
e24-h07-740xd.alias.bos.scalelab.redhat.com : ok=85 changed=11 unreachable=0 failed=1
e24-h17-740xd.alias.bos.scalelab.redhat.com : ok=154 changed=20 unreachable=0 failed=0
e24-h19-740xd.alias.bos.scalelab.redhat.com : ok=141 changed=17 unreachable=0 failed=0
e24-h21-740xd.alias.bos.scalelab.redhat.com : ok=142 changed=18 unreachable=0 failed=0
INSTALLER STATUS ****************************************************************************************
Install Ceph Monitor : Complete (0:01:09)
Install Ceph Manager : Complete (0:00:22)
Install Ceph OSD : In Progress (0:01:11)
This phase can be restarted by running: roles/ceph-osd/tasks/main.yml
Wednesday 17 April 2019 11:26:54 +0000 (0:00:05.037) 0:03:21.381 *******
===============================================================================
ceph-common : install redhat ceph packages --------------------------------------------------------- 18.79s
/usr/share/ceph-ansible/roles/ceph-common/tasks/installs/install_redhat_packages.yml:20 --------------------
ceph-common : install redhat ceph packages --------------------------------------------------------- 17.33s
/usr/share/ceph-ansible/roles/ceph-common/tasks/installs/install_redhat_packages.yml:20 --------------------
ceph-config : generate ceph configuration file: ceph.conf ------------------------------------------ 13.90s
/usr/share/ceph-ansible/roles/ceph-config/tasks/main.yml:83 ------------------------------------------------
ceph-config : generate ceph configuration file: ceph.conf ------------------------------------------ 10.75s
/usr/share/ceph-ansible/roles/ceph-config/tasks/main.yml:83 ------------------------------------------------
ceph-common : install redhat dependencies ------------------------------------------------------------ 8.08s
/usr/share/ceph-ansible/roles/ceph-common/tasks/installs/install_redhat_packages.yml:2 ---------------------
ceph-common : install redhat dependencies ------------------------------------------------------------ 7.24s
/usr/share/ceph-ansible/roles/ceph-common/tasks/installs/install_redhat_packages.yml:2 ---------------------
ceph-osd : use ceph-volume lvm batch to create bluestore osds ---------------------------------------- 5.04s
/usr/share/ceph-ansible/roles/ceph-osd/tasks/scenarios/lvm-batch.yml:3 -------------------------------------
gather and delegate facts ---------------------------------------------------------------------------- 3.88s
/usr/share/ceph-ansible/site.yml:38 ------------------------------------------------------------------------
ceph-mon : waiting for the monitor(s) to form the quorum... ------------------------------------------ 3.39s
/usr/share/ceph-ansible/roles/ceph-mon/tasks/ceph_keys.yml:2 -----------------------------------------------
ceph-mon : fetch ceph initial keys ------------------------------------------------------------------- 2.97s
/usr/share/ceph-ansible/roles/ceph-mon/tasks/ceph_keys.yml:19 ----------------------------------------------
ceph-mon : create ceph mgr keyring(s) ---------------------------------------------------------------- 2.92s
/usr/share/ceph-ansible/roles/ceph-mon/tasks/ceph_keys.yml:33 ----------------------------------------------
ceph-mgr : install ceph-mgr package on RedHat or SUSE ------------------------------------------------ 2.50s
/usr/share/ceph-ansible/roles/ceph-mgr/tasks/pre_requisite.yml:2 -------------------------------------------
ceph-validate : validate provided configuration ------------------------------------------------------ 1.62s
/usr/share/ceph-ansible/roles/ceph-validate/tasks/main.yml:2 -----------------------------------------------
ceph-infra : disable chronyd -------------------------------------------------------------------------- 1.43s
/usr/share/ceph-ansible/roles/ceph-infra/handlers/main.yml:9 -----------------------------------------------
ceph-infra : start firewalld -------------------------------------------------------------------------- 1.39s
/usr/share/ceph-ansible/roles/ceph-infra/tasks/configure_firewall.yml:16 -----------------------------------
ceph-infra : disable ntpd ----------------------------------------------------------------------------- 1.37s
/usr/share/ceph-ansible/roles/ceph-infra/handlers/main.yml:2 -----------------------------------------------
ceph-handler : check for a ceph mon socket ------------------------------------------------------------ 1.24s
/usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:2 --------------------------
ceph-handler : check for a ceph osd socket ------------------------------------------------------------ 1.23s
/usr/share/ceph-ansible/roles/ceph-handler/tasks/check_socket_non_container.yml:30 -------------------------
ceph-config : run 'ceph-volume lvm batch --report' to see how many osds are to be created ------------- 1.03s
/usr/share/ceph-ansible/roles/ceph-config/tasks/main.yml:22 ------------------------------------------------
ceph-osd : install dependencies ------------------------------------------------------------------------ 0.99s