Project

General

Profile

Bug #16686 » vstartrunner_output_with_master.log

complete vstart_runner output - Ramana Raja, 07/14/2016 04:58 PM

 
[root@bzn build]# LD_LIBRARY_PATH=/home/rraja/git/ceph/build/lib PYTHONPATH=/home/rraja/git/teuthology/:/home/rraja/git/ceph-qa-suite/:/home/rraja/git/ceph/src/pybind:/home/rraja/git/ceph/build/lib/cython_modules/lib.linux-x86_64-2.7/ python /home/rraja/git/ceph-qa-suite/tasks/cephfs/vstart_runner.py --interactive tasks.cephfs.test_volume_client.TestVolumeClient
2016-07-14 21:49:05,160.160 INFO:__main__:run args=['ps', '-u0']
2016-07-14 21:49:05,160.160 INFO:__main__:Running ['ps', '-u0']
2016-07-14 21:49:05,175.175 WARNING:__main__:Killing stray process 3133 ? 00:00:03 ceph-mds
2016-07-14 21:49:05,178.178 INFO:__main__:run args=['./bin/ceph', 'auth', 'get-or-create', 'client.0', 'osd', 'allow rw', 'mds', 'allow', 'mon', 'allow r']
2016-07-14 21:49:05,178.178 INFO:__main__:Running ['./bin/ceph', 'auth', 'get-or-create', 'client.0', 'osd', 'allow rw', 'mds', 'allow', 'mon', 'allow r']
2016-07-14 21:49:05,392.392 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:05,392.392 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.0': No such file or directory
2016-07-14 21:49:05,400.400 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.0
2016-07-14 21:49:05,401.401 INFO:__main__:run args=['./bin/ceph', 'auth', 'get-or-create', 'client.1', 'osd', 'allow rw', 'mds', 'allow', 'mon', 'allow r']
2016-07-14 21:49:05,401.401 INFO:__main__:Running ['./bin/ceph', 'auth', 'get-or-create', 'client.1', 'osd', 'allow rw', 'mds', 'allow', 'mon', 'allow r']
2016-07-14 21:49:05,601.601 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:05,601.601 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.1': No such file or directory
2016-07-14 21:49:05,608.608 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.1
2016-07-14 21:49:05,609.609 INFO:__main__:run args=['./bin/ceph', 'auth', 'get-or-create', 'client.2', 'osd', 'allow rw', 'mds', 'allow', 'mon', 'allow r']
2016-07-14 21:49:05,609.609 INFO:__main__:Running ['./bin/ceph', 'auth', 'get-or-create', 'client.2', 'osd', 'allow rw', 'mds', 'allow', 'mon', 'allow r']
2016-07-14 21:49:05,825.825 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.2']
2016-07-14 21:49:05,825.825 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.2']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.2': No such file or directory
2016-07-14 21:49:05,833.833 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.2
2016-07-14 21:49:05,833.833 INFO:__main__:run args=['./bin/ceph', 'auth', 'get-or-create', 'client.3', 'osd', 'allow rw', 'mds', 'allow', 'mon', 'allow r']
2016-07-14 21:49:05,834.834 INFO:__main__:Running ['./bin/ceph', 'auth', 'get-or-create', 'client.3', 'osd', 'allow rw', 'mds', 'allow', 'mon', 'allow r']
2016-07-14 21:49:06,038.038 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.3']
2016-07-14 21:49:06,038.038 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.3']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.3': No such file or directory
2016-07-14 21:49:06,046.046 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.3
2016-07-14 21:49:06,046.046 INFO:__main__:Discovered MDS IDs: ['a']
2016-07-14 21:49:06,048.048 INFO:__main__:run args=['./bin/ceph', 'tell', 'osd.*', 'injectargs', '--osd-mon-report-interval-max', '5']
2016-07-14 21:49:06,048.048 INFO:__main__:Running ['./bin/ceph', 'tell', 'osd.*', 'injectargs', '--osd-mon-report-interval-max', '5']
2016-07-14 21:49:06,188.188 INFO:__main__:Searching for existing instance osd_mon_report_interval_max/osd
2016-07-14 21:49:06,190.190 INFO:__main__:Searching for existing instance mds log max segments/mds
2016-07-14 21:49:06,190.190 INFO:__main__:Found string to replace at 1722
2016-07-14 21:49:06,190.190 INFO:__main__:Searching for existing instance osd_mon_report_interval_max/osd
2016-07-14 21:49:06,191.191 INFO:__main__:Searching for existing instance mds log max segments/mds
2016-07-14 21:49:06,192.192 INFO:__main__:Searching for existing instance mds root ino uid/global
2016-07-14 21:49:06,192.192 INFO:__main__:Searching for existing instance osd_mon_report_interval_max/osd
2016-07-14 21:49:06,193.193 INFO:__main__:Searching for existing instance mds log max segments/mds
2016-07-14 21:49:06,193.193 INFO:__main__:Searching for existing instance mds root ino uid/global
2016-07-14 21:49:06,193.193 INFO:__main__:Searching for existing instance mds root ino gid/global
2016-07-14 21:49:06,193.193 INFO:__main__:Searching for existing instance osd_mon_report_interval_max/osd
2016-07-14 21:49:06,194.194 INFO:__main__:Executing modules: ['tasks.cephfs.test_volume_client.TestVolumeClient']
2016-07-14 21:49:06,198.198 INFO:__main__:Loaded: [<unittest.suite.TestSuite tests=[<tasks.cephfs.test_volume_client.TestVolumeClient testMethod=test_15303>, <tasks.cephfs.test_volume_client.TestVolumeClient testMethod=test_data_isolated>, <tasks.cephfs.test_volume_client.TestVolumeClient testMethod=test_default_prefix>, <tasks.cephfs.test_volume_client.TestVolumeClient testMethod=test_evict_client>, <tasks.cephfs.test_volume_client.TestVolumeClient testMethod=test_idempotency>, <tasks.cephfs.test_volume_client.TestVolumeClient testMethod=test_lifecycle>, <tasks.cephfs.test_volume_client.TestVolumeClient testMethod=test_purge>, <tasks.cephfs.test_volume_client.TestVolumeClient testMethod=test_readonly_authorization>]>]
2016-07-14 21:49:06,198.198 INFO:__main__:Disabling 0 tests because of is_for_teuthology or needs_trimming
2016-07-14 21:49:06,199.199 INFO:__main__:Starting test: test_15303 (tasks.cephfs.test_volume_client.TestVolumeClient)
2016-07-14 21:49:06,199.199 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:06,199.199 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.0': No such file or directory
2016-07-14 21:49:06,207.207 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.0
2016-07-14 21:49:06,207.207 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:06,207.207 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.1': No such file or directory
2016-07-14 21:49:06,216.216 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.1
2016-07-14 21:49:06,217.217 INFO:__main__:run args=['ps', '-u0']
2016-07-14 21:49:06,217.217 INFO:__main__:Running ['ps', '-u0']
2016-07-14 21:49:06,239.239 INFO:__main__:No match for mds a: PID TTY TIME CMD
1 ? 00:00:02 systemd
2 ? 00:00:00 kthreadd
3 ? 00:00:00 ksoftirqd/0
5 ? 00:00:00 kworker/0:0H
7 ? 00:00:36 rcu_sched
8 ? 00:00:00 rcu_bh
9 ? 00:00:15 rcuos/0
10 ? 00:00:00 rcuob/0
11 ? 00:00:00 migration/0
12 ? 00:00:00 watchdog/0
13 ? 00:00:00 watchdog/1
14 ? 00:00:00 migration/1
15 ? 00:00:00 ksoftirqd/1
17 ? 00:00:00 kworker/1:0H
18 ? 00:00:06 rcuos/1
19 ? 00:00:00 rcuob/1
20 ? 00:00:00 watchdog/2
21 ? 00:00:00 migration/2
22 ? 00:00:00 ksoftirqd/2
24 ? 00:00:00 kworker/2:0H
25 ? 00:00:14 rcuos/2
26 ? 00:00:00 rcuob/2
27 ? 00:00:00 watchdog/3
28 ? 00:00:00 migration/3
29 ? 00:00:00 ksoftirqd/3
31 ? 00:00:00 kworker/3:0H
32 ? 00:00:05 rcuos/3
33 ? 00:00:00 rcuob/3
34 ? 00:00:00 kdevtmpfs
35 ? 00:00:00 netns
36 ? 00:00:00 writeback
37 ? 00:00:00 ksmd
38 ? 00:00:00 khugepaged
39 ? 00:00:00 crypto
40 ? 00:00:00 kintegrityd
41 ? 00:00:00 bioset
42 ? 00:00:00 kblockd
43 ? 00:00:00 ata_sff
44 ? 00:00:00 md
45 ? 00:00:00 devfreq_wq
51 ? 00:00:13 kswapd0
52 ? 00:00:00 vmstat
93 ? 00:00:00 kthrotld
94 ? 00:00:00 acpi_thermal_pm
95 ? 00:00:00 scsi_eh_0
96 ? 00:00:00 scsi_tmf_0
97 ? 00:00:00 scsi_eh_1
98 ? 00:00:00 scsi_tmf_1
99 ? 00:00:00 scsi_eh_2
100 ? 00:00:00 scsi_tmf_2
104 ? 00:00:00 kpsmoused
106 ? 00:00:00 dm_bufio_cache
107 ? 00:00:00 ipv6_addrconf
138 ? 00:00:00 deferwq
140 ? 00:00:00 bioset
178 ? 00:00:00 kauditd
389 ? 00:00:01 kworker/0:1H
399 ? 00:00:00 kworker/1:1H
400 ? 00:00:00 kworker/3:1H
420 ? 00:00:00 rtsx_pci_sdmmc_
425 ? 00:00:00 kworker/2:1H
613 ? 00:00:00 kdmflush
621 ? 00:00:00 bioset
622 ? 00:00:00 kcryptd_io
623 ? 00:00:00 kcryptd
624 ? 00:00:08 dmcrypt_write
625 ? 00:00:00 bioset
688 ? 00:00:00 kdmflush
691 ? 00:00:00 bioset
700 ? 00:00:00 kdmflush
701 ? 00:00:00 bioset
724 ? 00:00:00 jbd2/dm-1-8
725 ? 00:00:00 ext4-rsv-conver
823 ? 00:00:02 systemd-journal
861 ? 00:00:00 rpciod
880 ? 00:00:00 systemd-udevd
910 ? 00:00:00 ktpacpid
918 ? 00:00:00 irq/47-mei_me
949 ? 00:00:00 kmemstick
968 ? 00:00:00 cfg80211
984 ? 00:00:07 irq/50-iwlwifi
999 ? 00:00:00 kworker/u17:0
1001 ? 00:00:00 hci0
1002 ? 00:00:00 hci0
1007 ? 00:00:00 kworker/u17:2
1015 ? 00:00:00 kdmflush
1016 ? 00:00:00 bioset
1018 ? 00:00:00 kvm-irqfd-clean
1071 ? 00:00:00 jbd2/sda1-8
1072 ? 00:00:00 ext4-rsv-conver
1077 ? 00:00:06 jbd2/dm-3-8
1078 ? 00:00:00 ext4-rsv-conver
1105 ? 00:00:00 auditd
1119 ? 00:00:00 audispd
1123 ? 00:00:00 sedispatch
1133 ? 00:00:00 bluetoothd
1137 ? 00:00:00 mcelog
1139 ? 00:00:39 rngd
1146 ? 00:00:00 gssproxy
1154 ? 00:00:00 ModemManager
1158 ? 00:00:04 udisksd
1159 ? 00:00:00 firewalld
1169 ? 00:00:00 systemd-logind
1173 ? 00:00:00 accounts-daemon
1177 ? 00:00:00 alsactl
1249 ? 00:00:00 abrtd
1262 ? 00:00:00 iprt-VBoxWQueue
1268 ? 00:00:00 iprt-VBoxTscThr
1281 ? 00:00:00 abrt-dump-journ
1286 ? 00:00:00 abrt-dump-journ
1324 ? 00:00:12 NetworkManager
1376 ? 00:00:00 libvirtd
1393 ? 00:00:00 crond
1396 ? 00:00:00 atd
1398 ? 00:00:00 gdm
1463 ? 00:00:01 wpa_supplicant
1509 ? 00:00:00 gdm-session-wor
1582 ? 00:00:00 upowerd
1680 ? 00:00:10 packagekitd
1839 ? 00:00:00 dhclient
1903 ? 00:00:00 gdm-session-wor
1938 tty2 00:00:00 xf86-video-inte
2067 ? 00:00:00 krfcommd
2206 ? 00:00:00 cupsd
2235 ? 00:00:00 fwupd
2448 ? 00:00:00 dhclient
2633 ? 00:00:00 kworker/0:1
3387 ? 00:00:00 kworker/3:1
3927 ? 00:00:04 kworker/u16:0
5168 ? 00:00:00 kworker/1:3
13278 ? 00:00:00 kworker/u16:5
13551 ? 00:00:00 kworker/u16:1
13663 ? 00:00:00 kworker/1:1
13668 ? 00:00:00 kworker/3:0
13681 ? 00:00:00 kworker/2:2
13694 ? 00:00:00 kworker/0:3
13724 ? 00:00:00 kworker/u16:4
13765 ? 00:00:00 kworker/1:2
13781 ? 00:00:00 kworker/2:0
13794 ? 00:00:00 kworker/3:3
13799 ? 00:00:00 kworker/0:2
14835 pts/1 00:00:00 sudo
14836 ? 00:00:00 fprintd
14848 pts/1 00:00:00 su
14853 pts/1 00:00:00 bash
14902 ? 00:00:00 kworker/2:3
14903 pts/1 00:00:00 python
15074 pts/1 00:00:00 ps
16120 ? 00:00:00 kworker/2:1
18809 ? 00:00:00 ceph-msgr
18811 ? 00:00:00 rbd
28322 ? 00:00:00 systemd
28332 ? 00:00:00 (sd-pam)
28799 ? 00:00:10 kworker/u16:2
29866 ? 00:00:00 kworker/3:2
30529 ? 00:00:00 dio/dm-3
31742 ? 00:00:00 kworker/0:0
32711 ? 00:00:00 kworker/1:0
2016-07-14 21:49:06,240.240 ERROR:__main__:tried to stop a non-running daemon
2016-07-14 21:49:06,240.240 INFO:__main__:run args=['./bin/ceph', 'fs', 'ls', '--format=json-pretty']
2016-07-14 21:49:06,240.240 INFO:__main__:Running ['./bin/ceph', 'fs', 'ls', '--format=json-pretty']
2016-07-14 21:49:06,428.428 INFO:__main__:run args=['./bin/ceph', 'fs', 'set', u'cephfs_a', 'cluster_down', 'true']
2016-07-14 21:49:06,428.428 INFO:__main__:Running ['./bin/ceph', 'fs', 'set', u'cephfs_a', 'cluster_down', 'true']
2016-07-14 21:49:07,071.071 INFO:__main__:run args=['./bin/ceph', 'fs', 'get', u'cephfs_a', '--format=json-pretty']
2016-07-14 21:49:07,071.071 INFO:__main__:Running ['./bin/ceph', 'fs', 'get', u'cephfs_a', '--format=json-pretty']
2016-07-14 21:49:07,257.257 INFO:__main__:run args=['./bin/ceph', 'mds', 'fail', '4113']
2016-07-14 21:49:07,257.257 INFO:__main__:Running ['./bin/ceph', 'mds', 'fail', '4113']
2016-07-14 21:49:08,055.055 INFO:__main__:run args=['./bin/ceph', 'fs', 'rm', u'cephfs_a', '--yes-i-really-mean-it']
2016-07-14 21:49:08,055.055 INFO:__main__:Running ['./bin/ceph', 'fs', 'rm', u'cephfs_a', '--yes-i-really-mean-it']
2016-07-14 21:49:09,055.055 INFO:__main__:run args=['./bin/ceph', 'osd', 'pool', 'delete', u'cephfs_metadata_a', u'cephfs_metadata_a', '--yes-i-really-really-mean-it']
2016-07-14 21:49:09,055.055 INFO:__main__:Running ['./bin/ceph', 'osd', 'pool', 'delete', u'cephfs_metadata_a', u'cephfs_metadata_a', '--yes-i-really-really-mean-it']
2016-07-14 21:49:10,155.155 INFO:__main__:run args=['./bin/ceph', 'osd', 'pool', 'delete', u'cephfs_data_a', u'cephfs_data_a', '--yes-i-really-really-mean-it']
2016-07-14 21:49:10,155.155 INFO:__main__:Running ['./bin/ceph', 'osd', 'pool', 'delete', u'cephfs_data_a', u'cephfs_data_a', '--yes-i-really-really-mean-it']
2016-07-14 21:49:11,269.269 INFO:__main__:run args=['./bin/ceph', 'daemon', 'osd.0', 'config', 'get', 'osd_mon_report_interval_max']
2016-07-14 21:49:11,270.270 INFO:__main__:Running ['./bin/ceph', 'daemon', 'osd.0', 'config', 'get', 'osd_mon_report_interval_max']
2016-07-14 21:49:11,385.385 INFO:tasks.cephfs.filesystem:_json_asok output: {
"osd_mon_report_interval_max": "5"
}

2016-07-14 21:49:11,385.385 INFO:__main__:run args=['./bin/ceph', 'osd', 'dump', '--format=json-pretty']
2016-07-14 21:49:11,385.385 INFO:__main__:Running ['./bin/ceph', 'osd', 'dump', '--format=json-pretty']
2016-07-14 21:49:11,567.567 INFO:__main__:run args=['./bin/ceph', 'osd', 'blacklist', 'clear']
2016-07-14 21:49:11,567.567 INFO:__main__:Running ['./bin/ceph', 'osd', 'blacklist', 'clear']
2016-07-14 21:49:12,259.259 INFO:__main__:run args=['./bin/ceph', 'auth', 'caps', 'client.0', 'mds', 'allow', 'mon', 'allow r', 'osd', 'allow rw pool=cephfs_data']
2016-07-14 21:49:12,259.259 INFO:__main__:Running ['./bin/ceph', 'auth', 'caps', 'client.0', 'mds', 'allow', 'mon', 'allow r', 'osd', 'allow rw pool=cephfs_data']
2016-07-14 21:49:12,493.493 INFO:__main__:run args=['./bin/ceph', 'auth', 'caps', 'client.1', 'mds', 'allow', 'mon', 'allow r', 'osd', 'allow rw pool=cephfs_data']
2016-07-14 21:49:12,494.494 INFO:__main__:Running ['./bin/ceph', 'auth', 'caps', 'client.1', 'mds', 'allow', 'mon', 'allow r', 'osd', 'allow rw pool=cephfs_data']
2016-07-14 21:49:12,754.754 INFO:__main__:run args=['./bin/ceph', 'auth', 'caps', 'client.2', 'mds', 'allow', 'mon', 'allow r', 'osd', 'allow rw pool=cephfs_data']
2016-07-14 21:49:12,754.754 INFO:__main__:Running ['./bin/ceph', 'auth', 'caps', 'client.2', 'mds', 'allow', 'mon', 'allow r', 'osd', 'allow rw pool=cephfs_data']
2016-07-14 21:49:12,972.972 INFO:__main__:run args=['./bin/ceph', 'auth', 'caps', 'client.3', 'mds', 'allow', 'mon', 'allow r', 'osd', 'allow rw pool=cephfs_data']
2016-07-14 21:49:12,972.972 INFO:__main__:Running ['./bin/ceph', 'auth', 'caps', 'client.3', 'mds', 'allow', 'mon', 'allow r', 'osd', 'allow rw pool=cephfs_data']
2016-07-14 21:49:13,187.187 INFO:tasks.cephfs.cephfs_test_case:['0', '1', '2', '3']
2016-07-14 21:49:13,187.187 INFO:__main__:run args=['./bin/ceph', 'auth', 'list', '--format=json-pretty']
2016-07-14 21:49:13,188.188 INFO:__main__:Running ['./bin/ceph', 'auth', 'list', '--format=json-pretty']
2016-07-14 21:49:13,380.380 INFO:tasks.cephfs.filesystem:Creating filesystem 'cephfs'
2016-07-14 21:49:13,381.381 INFO:__main__:run args=['./bin/ceph', 'daemon', 'mon.a', 'config', 'get', 'mon_pg_warn_min_per_osd']
2016-07-14 21:49:13,381.381 INFO:__main__:Running ['./bin/ceph', 'daemon', 'mon.a', 'config', 'get', 'mon_pg_warn_min_per_osd']
2016-07-14 21:49:13,489.489 INFO:tasks.cephfs.filesystem:_json_asok output: {
"mon_pg_warn_min_per_osd": "3"
}

2016-07-14 21:49:13,489.489 INFO:__main__:run args=['./bin/ceph', 'osd', 'pool', 'create', 'cephfs_metadata', '9']
2016-07-14 21:49:13,490.490 INFO:__main__:Running ['./bin/ceph', 'osd', 'pool', 'create', 'cephfs_metadata', '9']
2016-07-14 21:49:14,342.342 INFO:__main__:run args=['./bin/ceph', 'osd', 'pool', 'create', 'cephfs_data', '9']
2016-07-14 21:49:14,342.342 INFO:__main__:Running ['./bin/ceph', 'osd', 'pool', 'create', 'cephfs_data', '9']
2016-07-14 21:49:15,351.351 INFO:__main__:run args=['./bin/ceph', 'fs', 'new', 'cephfs', 'cephfs_metadata', 'cephfs_data']
2016-07-14 21:49:15,351.351 INFO:__main__:Running ['./bin/ceph', 'fs', 'new', 'cephfs', 'cephfs_metadata', 'cephfs_data']
2016-07-14 21:49:16,416.416 INFO:__main__:run args=['ps', '-u0']
2016-07-14 21:49:16,416.416 INFO:__main__:Running ['ps', '-u0']
2016-07-14 21:49:16,440.440 INFO:__main__:No match for mds a: PID TTY TIME CMD
1 ? 00:00:02 systemd
2 ? 00:00:00 kthreadd
3 ? 00:00:00 ksoftirqd/0
5 ? 00:00:00 kworker/0:0H
7 ? 00:00:36 rcu_sched
8 ? 00:00:00 rcu_bh
9 ? 00:00:15 rcuos/0
10 ? 00:00:00 rcuob/0
11 ? 00:00:00 migration/0
12 ? 00:00:00 watchdog/0
13 ? 00:00:00 watchdog/1
14 ? 00:00:00 migration/1
15 ? 00:00:00 ksoftirqd/1
17 ? 00:00:00 kworker/1:0H
18 ? 00:00:06 rcuos/1
19 ? 00:00:00 rcuob/1
20 ? 00:00:00 watchdog/2
21 ? 00:00:00 migration/2
22 ? 00:00:00 ksoftirqd/2
24 ? 00:00:00 kworker/2:0H
25 ? 00:00:14 rcuos/2
26 ? 00:00:00 rcuob/2
27 ? 00:00:00 watchdog/3
28 ? 00:00:00 migration/3
29 ? 00:00:00 ksoftirqd/3
31 ? 00:00:00 kworker/3:0H
32 ? 00:00:05 rcuos/3
33 ? 00:00:00 rcuob/3
34 ? 00:00:00 kdevtmpfs
35 ? 00:00:00 netns
36 ? 00:00:00 writeback
37 ? 00:00:00 ksmd
38 ? 00:00:00 khugepaged
39 ? 00:00:00 crypto
40 ? 00:00:00 kintegrityd
41 ? 00:00:00 bioset
42 ? 00:00:00 kblockd
43 ? 00:00:00 ata_sff
44 ? 00:00:00 md
45 ? 00:00:00 devfreq_wq
51 ? 00:00:13 kswapd0
52 ? 00:00:00 vmstat
93 ? 00:00:00 kthrotld
94 ? 00:00:00 acpi_thermal_pm
95 ? 00:00:00 scsi_eh_0
96 ? 00:00:00 scsi_tmf_0
97 ? 00:00:00 scsi_eh_1
98 ? 00:00:00 scsi_tmf_1
99 ? 00:00:00 scsi_eh_2
100 ? 00:00:00 scsi_tmf_2
104 ? 00:00:00 kpsmoused
106 ? 00:00:00 dm_bufio_cache
107 ? 00:00:00 ipv6_addrconf
138 ? 00:00:00 deferwq
140 ? 00:00:00 bioset
178 ? 00:00:00 kauditd
389 ? 00:00:01 kworker/0:1H
399 ? 00:00:00 kworker/1:1H
400 ? 00:00:00 kworker/3:1H
420 ? 00:00:00 rtsx_pci_sdmmc_
425 ? 00:00:00 kworker/2:1H
613 ? 00:00:00 kdmflush
621 ? 00:00:00 bioset
622 ? 00:00:00 kcryptd_io
623 ? 00:00:00 kcryptd
624 ? 00:00:08 dmcrypt_write
625 ? 00:00:00 bioset
688 ? 00:00:00 kdmflush
691 ? 00:00:00 bioset
700 ? 00:00:00 kdmflush
701 ? 00:00:00 bioset
724 ? 00:00:00 jbd2/dm-1-8
725 ? 00:00:00 ext4-rsv-conver
823 ? 00:00:02 systemd-journal
861 ? 00:00:00 rpciod
880 ? 00:00:00 systemd-udevd
910 ? 00:00:00 ktpacpid
918 ? 00:00:00 irq/47-mei_me
949 ? 00:00:00 kmemstick
968 ? 00:00:00 cfg80211
984 ? 00:00:07 irq/50-iwlwifi
999 ? 00:00:00 kworker/u17:0
1001 ? 00:00:00 hci0
1002 ? 00:00:00 hci0
1007 ? 00:00:00 kworker/u17:2
1015 ? 00:00:00 kdmflush
1016 ? 00:00:00 bioset
1018 ? 00:00:00 kvm-irqfd-clean
1071 ? 00:00:00 jbd2/sda1-8
1072 ? 00:00:00 ext4-rsv-conver
1077 ? 00:00:06 jbd2/dm-3-8
1078 ? 00:00:00 ext4-rsv-conver
1105 ? 00:00:00 auditd
1119 ? 00:00:00 audispd
1123 ? 00:00:00 sedispatch
1133 ? 00:00:00 bluetoothd
1137 ? 00:00:00 mcelog
1139 ? 00:00:39 rngd
1146 ? 00:00:00 gssproxy
1154 ? 00:00:00 ModemManager
1158 ? 00:00:04 udisksd
1159 ? 00:00:00 firewalld
1169 ? 00:00:00 systemd-logind
1173 ? 00:00:00 accounts-daemon
1177 ? 00:00:00 alsactl
1249 ? 00:00:00 abrtd
1262 ? 00:00:00 iprt-VBoxWQueue
1268 ? 00:00:00 iprt-VBoxTscThr
1281 ? 00:00:00 abrt-dump-journ
1286 ? 00:00:00 abrt-dump-journ
1324 ? 00:00:12 NetworkManager
1376 ? 00:00:00 libvirtd
1393 ? 00:00:00 crond
1396 ? 00:00:00 atd
1398 ? 00:00:00 gdm
1463 ? 00:00:01 wpa_supplicant
1509 ? 00:00:00 gdm-session-wor
1582 ? 00:00:00 upowerd
1680 ? 00:00:10 packagekitd
1839 ? 00:00:00 dhclient
1903 ? 00:00:00 gdm-session-wor
1938 tty2 00:00:00 xf86-video-inte
2067 ? 00:00:00 krfcommd
2206 ? 00:00:00 cupsd
2235 ? 00:00:00 fwupd
2448 ? 00:00:00 dhclient
2633 ? 00:00:00 kworker/0:1
3387 ? 00:00:00 kworker/3:1
3927 ? 00:00:04 kworker/u16:0
5168 ? 00:00:00 kworker/1:3
13278 ? 00:00:00 kworker/u16:5
13551 ? 00:00:00 kworker/u16:1
13663 ? 00:00:00 kworker/1:1
13668 ? 00:00:00 kworker/3:0
13681 ? 00:00:00 kworker/2:2
13694 ? 00:00:00 kworker/0:3
13724 ? 00:00:00 kworker/u16:4
13765 ? 00:00:00 kworker/1:2
13781 ? 00:00:00 kworker/2:0
13794 ? 00:00:00 kworker/3:3
13799 ? 00:00:00 kworker/0:2
14835 pts/1 00:00:00 sudo
14836 ? 00:00:00 fprintd
14848 pts/1 00:00:00 su
14853 pts/1 00:00:00 bash
14902 ? 00:00:00 kworker/2:3
14903 pts/1 00:00:00 python
15599 pts/1 00:00:00 ps
16120 ? 00:00:00 kworker/2:1
18809 ? 00:00:00 ceph-msgr
18811 ? 00:00:00 rbd
28322 ? 00:00:00 systemd
28332 ? 00:00:00 (sd-pam)
28799 ? 00:00:10 kworker/u16:2
29866 ? 00:00:00 kworker/3:2
30529 ? 00:00:00 dio/dm-3
31742 ? 00:00:00 kworker/0:0
32711 ? 00:00:00 kworker/1:0
2016-07-14 21:49:16,441.441 INFO:__main__:run args=['./bin/./ceph-mds', '-i', 'a']
2016-07-14 21:49:16,441.441 INFO:__main__:Running ['./bin/./ceph-mds', '-i', 'a']
2016-07-14 21:49:16,471.471 INFO:__main__:run args=['./bin/ceph', 'fs', 'get', 'cephfs', '--format=json-pretty']
2016-07-14 21:49:16,472.472 INFO:__main__:Running ['./bin/ceph', 'fs', 'get', 'cephfs', '--format=json-pretty']
2016-07-14 21:49:16,667.667 INFO:tasks.cephfs.filesystem:are_daemons_healthy: mds map: {u'session_autoclose': 300, u'up': {u'mds_0': 4123}, u'last_failure_osd_epoch': 0, u'in': [0], u'last_failure': 0, u'max_file_size': 1099511627776, u'explicitly_allowed_features': 0, u'damaged': [], u'tableserver': 0, u'metadata_pool': 3, u'failed': [], u'epoch': 10, u'flags': 0, u'max_mds': 1, u'compat': {u'compat': {}, u'ro_compat': {}, u'incompat': {u'feature_8': u'file layout v2', u'feature_2': u'client writeable ranges', u'feature_3': u'default file layouts on dirs', u'feature_1': u'base v0.20', u'feature_6': u'dirfrag is stored in omap', u'feature_4': u'dir inode in separate object', u'feature_5': u'mds uses versioned encoding'}}, u'data_pools': [4], u'info': {u'gid_4123': {u'standby_for_rank': -1, u'export_targets': [], u'name': u'a', u'incarnation': 10, u'standby_replay': False, u'state_seq': 1, u'standby_for_fscid': -1, u'state': u'up:creating', u'gid': 4123, u'features': 576460752032890879, u'rank': 0, u'standby_for_name': u'', u'addr': u'10.70.1.141:6812/14766'}}, u'fs_name': u'cephfs', u'created': u'2016-07-14 21:49:15.555766', u'enabled': True, u'modified': u'2016-07-14 21:49:15.555766', u'session_timeout': 60, u'stopped': [], u'ever_allowed_features': 0, u'root': 0}
2016-07-14 21:49:16,668.668 WARNING:tasks.cephfs.filesystem:Unhealthy mds state gid_4123:up:creating
2016-07-14 21:49:17,668.668 INFO:__main__:run args=['./bin/ceph', 'fs', 'get', 'cephfs', '--format=json-pretty']
2016-07-14 21:49:17,668.668 INFO:__main__:Running ['./bin/ceph', 'fs', 'get', 'cephfs', '--format=json-pretty']
2016-07-14 21:49:17,866.866 INFO:tasks.cephfs.filesystem:are_daemons_healthy: mds map: {u'session_autoclose': 300, u'up': {u'mds_0': 4123}, u'last_failure_osd_epoch': 0, u'in': [0], u'last_failure': 0, u'max_file_size': 1099511627776, u'explicitly_allowed_features': 0, u'damaged': [], u'tableserver': 0, u'metadata_pool': 3, u'failed': [], u'epoch': 11, u'flags': 0, u'max_mds': 1, u'compat': {u'compat': {}, u'ro_compat': {}, u'incompat': {u'feature_8': u'file layout v2', u'feature_2': u'client writeable ranges', u'feature_3': u'default file layouts on dirs', u'feature_1': u'base v0.20', u'feature_6': u'dirfrag is stored in omap', u'feature_4': u'dir inode in separate object', u'feature_5': u'mds uses versioned encoding'}}, u'data_pools': [4], u'info': {u'gid_4123': {u'standby_for_rank': -1, u'export_targets': [], u'name': u'a', u'incarnation': 10, u'standby_replay': False, u'state_seq': 4, u'standby_for_fscid': -1, u'state': u'up:active', u'gid': 4123, u'features': 576460752032890879, u'rank': 0, u'standby_for_name': u'', u'addr': u'10.70.1.141:6812/14766'}}, u'fs_name': u'cephfs', u'created': u'2016-07-14 21:49:15.555766', u'enabled': True, u'modified': u'2016-07-14 21:49:15.555766', u'session_timeout': 60, u'stopped': [], u'ever_allowed_features': 0, u'root': 0}
2016-07-14 21:49:17,866.866 INFO:tasks.cephfs.filesystem:are_daemons_healthy: 1/1
2016-07-14 21:49:17,867.867 INFO:__main__:run args=['./bin/ceph', 'daemon', 'mds.a', 'status']
2016-07-14 21:49:17,867.867 INFO:__main__:Running ['./bin/ceph', 'daemon', 'mds.a', 'status']
2016-07-14 21:49:17,968.968 INFO:tasks.cephfs.filesystem:_json_asok output: {
"cluster_fsid": "ae66e242-d103-408a-b4cf-f160f95d365c",
"whoami": 0,
"want_state": "up:active",
"state": "up:active",
"mdsmap_epoch": 11,
"osdmap_epoch": 17,
"osdmap_epoch_barrier": 17
}

2016-07-14 21:49:17,969.969 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:17,969.969 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.0': No such file or directory
2016-07-14 21:49:17,983.983 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.0
2016-07-14 21:49:17,984.984 INFO:__main__:run args=['mkdir', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:17,984.984 INFO:__main__:Running ['mkdir', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:17,991.991 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:17,992.992 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:18,007.007 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:18,008.008 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:18,019.019 INFO:__main__:Pre-mount connections: [43, 47]
2016-07-14 21:49:18,019.019 INFO:__main__:run args=['./bin/ceph-fuse', '-f', '--name', 'client.0', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:18,020.020 INFO:__main__:Running ['./bin/ceph-fuse', '-f', '--name', 'client.0', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:18,026.026 INFO:__main__:Mounting client.0 with pid 15669
2016-07-14 21:49:18,026.026 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:18,027.027 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:18,043.043 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:18,044.044 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:19,059.059 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:19,060.060 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:19,069.069 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:19,069.069 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:19,080.080 INFO:__main__:Post-mount connections: [43, 47, 48]
2016-07-14 21:49:19,080.080 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:19,081.081 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:19,089.089 INFO:tasks.cephfs.fuse_mount:ceph-fuse is mounted on /tmp/tmp0EnRmD/mnt.0
2016-07-14 21:49:19,089.089 INFO:__main__:run args=['sudo', 'chmod', '1777', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:19,089.089 INFO:__main__:Running ['chmod', '1777', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:19,101.101 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:19,101.101 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.1': No such file or directory
2016-07-14 21:49:19,112.112 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.1
2016-07-14 21:49:19,112.112 INFO:__main__:run args=['mkdir', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:19,113.113 INFO:__main__:Running ['mkdir', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:19,121.121 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:19,121.121 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:19,133.133 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:19,134.134 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:19,143.143 INFO:__main__:Pre-mount connections: [43, 47, 48]
2016-07-14 21:49:19,143.143 INFO:__main__:run args=['./bin/ceph-fuse', '-f', '--name', 'client.1', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:19,144.144 INFO:__main__:Running ['./bin/ceph-fuse', '-f', '--name', 'client.1', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:19,152.152 INFO:__main__:Mounting client.1 with pid 15711
2016-07-14 21:49:19,152.152 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:19,152.152 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:19,165.165 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:19,166.166 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:20,176.176 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:20,176.176 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:20,185.185 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:20,185.185 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:20,193.193 INFO:__main__:Post-mount connections: [43, 47, 48, 49]
2016-07-14 21:49:20,194.194 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:20,194.194 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:20,202.202 INFO:tasks.cephfs.fuse_mount:ceph-fuse is mounted on /tmp/tmp0EnRmD/mnt.1
2016-07-14 21:49:20,202.202 INFO:__main__:run args=['sudo', 'chmod', '1777', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:20,202.202 INFO:__main__:Running ['chmod', '1777', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:20,218.218 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:20,218.218 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:20,229.229 INFO:tasks.cephfs.fuse_mount:ceph-fuse is mounted on /tmp/tmp0EnRmD/mnt.0
2016-07-14 21:49:20,229.229 INFO:tasks.cephfs.fuse_mount:Running fusermount -u on local...
2016-07-14 21:49:20,229.229 INFO:__main__:run args=['sudo', 'fusermount', '-u', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:20,230.230 INFO:__main__:Running ['fusermount', '-u', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:20,260.260 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:20,261.261 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:20,270.270 INFO:teuthology.orchestra.run:waiting for 900
2016-07-14 21:49:26,277.277 INFO:__main__:run args=['rmdir', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:26,277.277 INFO:__main__:Running ['rmdir', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:26,286.286 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:26,287.287 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:26,294.294 INFO:tasks.cephfs.fuse_mount:ceph-fuse is mounted on /tmp/tmp0EnRmD/mnt.1
2016-07-14 21:49:26,294.294 INFO:tasks.cephfs.fuse_mount:Running fusermount -u on local...
2016-07-14 21:49:26,294.294 INFO:__main__:run args=['sudo', 'fusermount', '-u', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:26,294.294 INFO:__main__:Running ['fusermount', '-u', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:26,323.323 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:26,323.323 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:26,333.333 INFO:teuthology.orchestra.run:waiting for 900
2016-07-14 21:49:32,338.338 INFO:__main__:run args=['rmdir', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:32,339.339 INFO:__main__:Running ['rmdir', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:32,346.346 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.2']
2016-07-14 21:49:32,347.347 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.2']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.2': No such file or directory
2016-07-14 21:49:32,358.358 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.2
2016-07-14 21:49:32,359.359 INFO:__main__:run args=['rmdir', '--', '/tmp/tmp0EnRmD/mnt.2']
2016-07-14 21:49:32,359.359 INFO:__main__:Running ['rmdir', '--', '/tmp/tmp0EnRmD/mnt.2']
rmdir: failed to remove '/tmp/tmp0EnRmD/mnt.2': No such file or directory
2016-07-14 21:49:32,371.371 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.3']
2016-07-14 21:49:32,372.372 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.3']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.3': No such file or directory
2016-07-14 21:49:32,382.382 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.3
2016-07-14 21:49:32,383.383 INFO:__main__:run args=['rmdir', '--', '/tmp/tmp0EnRmD/mnt.3']
2016-07-14 21:49:32,384.384 INFO:__main__:Running ['rmdir', '--', '/tmp/tmp0EnRmD/mnt.3']
rmdir: failed to remove '/tmp/tmp0EnRmD/mnt.3': No such file or directory
2016-07-14 21:49:32,392.392 INFO:__main__:run args=['mkdir', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:32,392.392 INFO:__main__:Running ['mkdir', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:32,405.405 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:32,405.405 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:32,419.419 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:32,420.420 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:32,427.427 INFO:__main__:Pre-mount connections: [43, 47]
2016-07-14 21:49:32,427.427 INFO:__main__:run args=['./bin/ceph-fuse', '-f', '--name', 'client.0', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:32,427.427 INFO:__main__:Running ['./bin/ceph-fuse', '-f', '--name', 'client.0', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:32,440.440 INFO:__main__:Mounting client.0 with pid 15797
2016-07-14 21:49:32,441.441 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:32,441.441 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:32,456.456 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:32,456.456 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:33,469.469 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:33,469.469 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:33,477.477 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:33,477.477 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:33,485.485 INFO:__main__:Post-mount connections: [43, 47, 48]
2016-07-14 21:49:33,486.486 INFO:__main__:run args=['mkdir', 'parent1']
2016-07-14 21:49:33,486.486 INFO:__main__:Running ['mkdir', 'parent1']
2016-07-14 21:49:33,520.520 INFO:__main__:run args=['mkdir', 'parent2']
2016-07-14 21:49:33,520.520 INFO:__main__:Running ['mkdir', 'parent2']
2016-07-14 21:49:33,531.531 INFO:__main__:run args=['mkdir', 'parent1/mydir']
2016-07-14 21:49:33,531.531 INFO:__main__:Running ['mkdir', 'parent1/mydir']
2016-07-14 21:49:33,557.557 INFO:__main__:run args=['mkdir', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:33,557.557 INFO:__main__:Running ['mkdir', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:33,564.564 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:33,564.564 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:33,576.576 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:33,576.576 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:33,588.588 INFO:__main__:Pre-mount connections: [43, 47, 48]
2016-07-14 21:49:33,589.589 INFO:__main__:run args=['./bin/ceph-fuse', '-f', '--name', 'client.1', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:33,589.589 INFO:__main__:Running ['./bin/ceph-fuse', '-f', '--name', 'client.1', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:33,596.596 INFO:__main__:Mounting client.1 with pid 15840
2016-07-14 21:49:33,596.596 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:33,596.596 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:33,612.612 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:33,612.612 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:34,626.626 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:34,626.626 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:34,635.635 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:34,635.635 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:34,643.643 INFO:__main__:Post-mount connections: [43, 47, 48, 49]
2016-07-14 21:49:34,644.644 INFO:__main__:run args=['touch', 'parent1/mydir/afile']
2016-07-14 21:49:34,644.644 INFO:__main__:Running ['touch', 'parent1/mydir/afile']
2016-07-14 21:49:34,693.693 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:34,694.694 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:34,706.706 INFO:tasks.cephfs.fuse_mount:ceph-fuse is mounted on /tmp/tmp0EnRmD/mnt.1
2016-07-14 21:49:34,706.706 INFO:tasks.cephfs.fuse_mount:Running fusermount -u on local...
2016-07-14 21:49:34,706.706 INFO:__main__:run args=['sudo', 'fusermount', '-u', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:34,706.706 INFO:__main__:Running ['fusermount', '-u', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:34,728.728 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:34,729.729 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:34,742.742 INFO:teuthology.orchestra.run:waiting for 900
2016-07-14 21:49:40,745.745 INFO:__main__:run args=['rmdir', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:40,745.745 INFO:__main__:Running ['rmdir', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:40,752.752 INFO:__main__:run args=['ls', 'parent1/mydir']
2016-07-14 21:49:40,752.752 INFO:__main__:Running ['ls', 'parent1/mydir']
2016-07-14 21:49:40,761.761 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:40,762.762 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:40,769.769 INFO:tasks.cephfs.fuse_mount:ceph-fuse is mounted on /tmp/tmp0EnRmD/mnt.0
2016-07-14 21:49:40,769.769 INFO:tasks.cephfs.fuse_mount:Running fusermount -u on local...
2016-07-14 21:49:40,769.769 INFO:__main__:run args=['sudo', 'fusermount', '-u', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:40,769.769 INFO:__main__:Running ['fusermount', '-u', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:40,801.801 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:40,801.801 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:40,816.816 INFO:__main__:kill
2016-07-14 21:49:40,816.816 INFO:__main__:kill: killing pid 15797 (['./bin/ceph-fuse', '-f', '--name', 'client.0', '/tmp/tmp0EnRmD/mnt.0'])
ceph-fuse[15797]: starting ceph client
2016-07-14 21:49:32.455607 7fa938e2cf40 -1 WARNING: the following dangerous and experimental features are enabled: *
2016-07-14 21:49:32.455746 7fa938e2cf40 -1 WARNING: the following dangerous and experimental features are enabled: *
2016-07-14 21:49:32.457752 7fa938e2cf40 -1 WARNING: the following dangerous and experimental features are enabled: *
2016-07-14 21:49:32.458410 7fa938e2cf40 -1 init, newargv = 0x9b09080 newargc=11
ceph-fuse[15797]: starting fuse
ceph-fuse[15797]: fuse finished with error 0 and tester_r 0
2016-07-14 21:49:40,818.818 INFO:__main__:run args=['rm', '-rf', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:40,818.818 INFO:__main__:Running ['rm', '-rf', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:40,829.829 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:40,829.829 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.1': No such file or directory
2016-07-14 21:49:40,837.837 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.1
2016-07-14 21:49:40,837.837 INFO:__main__:run args=['rm', '-rf', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:40,838.838 INFO:__main__:Running ['rm', '-rf', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:40,850.850 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.2']
2016-07-14 21:49:40,850.850 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.2']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.2': No such file or directory
2016-07-14 21:49:40,861.861 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.2
2016-07-14 21:49:40,861.861 INFO:__main__:run args=['rm', '-rf', '/tmp/tmp0EnRmD/mnt.2']
2016-07-14 21:49:40,862.862 INFO:__main__:Running ['rm', '-rf', '/tmp/tmp0EnRmD/mnt.2']
2016-07-14 21:49:40,869.869 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.3']
2016-07-14 21:49:40,869.869 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.3']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.3': No such file or directory
2016-07-14 21:49:40,881.881 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.3
2016-07-14 21:49:40,881.881 INFO:__main__:run args=['rm', '-rf', '/tmp/tmp0EnRmD/mnt.3']
2016-07-14 21:49:40,881.881 INFO:__main__:Running ['rm', '-rf', '/tmp/tmp0EnRmD/mnt.3']
2016-07-14 21:49:40,889.889 INFO:__main__:test_15303 (tasks.cephfs.test_volume_client.TestVolumeClient) ... ok
2016-07-14 21:49:40,890.890 INFO:__main__:Stopped test: test_15303 (tasks.cephfs.test_volume_client.TestVolumeClient) in 34.690646s
2016-07-14 21:49:40,891.891 INFO:__main__:Starting test: test_data_isolated (tasks.cephfs.test_volume_client.TestVolumeClient)
2016-07-14 21:49:40,891.891 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:40,892.892 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.0': No such file or directory
2016-07-14 21:49:40,901.901 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.0
2016-07-14 21:49:40,901.901 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:40,901.901 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.1': No such file or directory
2016-07-14 21:49:40,916.916 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.1
2016-07-14 21:49:40,916.916 INFO:__main__:run args=['ps', '-u0']
2016-07-14 21:49:40,917.917 INFO:__main__:Running ['ps', '-u0']
2016-07-14 21:49:40,947.947 INFO:__main__:No match for mds a: PID TTY TIME CMD
1 ? 00:00:02 systemd
2 ? 00:00:00 kthreadd
3 ? 00:00:00 ksoftirqd/0
5 ? 00:00:00 kworker/0:0H
7 ? 00:00:36 rcu_sched
8 ? 00:00:00 rcu_bh
9 ? 00:00:15 rcuos/0
10 ? 00:00:00 rcuob/0
11 ? 00:00:00 migration/0
12 ? 00:00:00 watchdog/0
13 ? 00:00:00 watchdog/1
14 ? 00:00:00 migration/1
15 ? 00:00:00 ksoftirqd/1
17 ? 00:00:00 kworker/1:0H
18 ? 00:00:06 rcuos/1
19 ? 00:00:00 rcuob/1
20 ? 00:00:00 watchdog/2
21 ? 00:00:00 migration/2
22 ? 00:00:00 ksoftirqd/2
24 ? 00:00:00 kworker/2:0H
25 ? 00:00:14 rcuos/2
26 ? 00:00:00 rcuob/2
27 ? 00:00:00 watchdog/3
28 ? 00:00:00 migration/3
29 ? 00:00:00 ksoftirqd/3
31 ? 00:00:00 kworker/3:0H
32 ? 00:00:05 rcuos/3
33 ? 00:00:00 rcuob/3
34 ? 00:00:00 kdevtmpfs
35 ? 00:00:00 netns
36 ? 00:00:00 writeback
37 ? 00:00:00 ksmd
38 ? 00:00:00 khugepaged
39 ? 00:00:00 crypto
40 ? 00:00:00 kintegrityd
41 ? 00:00:00 bioset
42 ? 00:00:00 kblockd
43 ? 00:00:00 ata_sff
44 ? 00:00:00 md
45 ? 00:00:00 devfreq_wq
51 ? 00:00:13 kswapd0
52 ? 00:00:00 vmstat
93 ? 00:00:00 kthrotld
94 ? 00:00:00 acpi_thermal_pm
95 ? 00:00:00 scsi_eh_0
96 ? 00:00:00 scsi_tmf_0
97 ? 00:00:00 scsi_eh_1
98 ? 00:00:00 scsi_tmf_1
99 ? 00:00:00 scsi_eh_2
100 ? 00:00:00 scsi_tmf_2
104 ? 00:00:00 kpsmoused
106 ? 00:00:00 dm_bufio_cache
107 ? 00:00:00 ipv6_addrconf
138 ? 00:00:00 deferwq
140 ? 00:00:00 bioset
178 ? 00:00:00 kauditd
389 ? 00:00:01 kworker/0:1H
399 ? 00:00:00 kworker/1:1H
400 ? 00:00:00 kworker/3:1H
420 ? 00:00:00 rtsx_pci_sdmmc_
425 ? 00:00:00 kworker/2:1H
613 ? 00:00:00 kdmflush
621 ? 00:00:00 bioset
622 ? 00:00:00 kcryptd_io
623 ? 00:00:00 kcryptd
624 ? 00:00:08 dmcrypt_write
625 ? 00:00:00 bioset
688 ? 00:00:00 kdmflush
691 ? 00:00:00 bioset
700 ? 00:00:00 kdmflush
701 ? 00:00:00 bioset
724 ? 00:00:00 jbd2/dm-1-8
725 ? 00:00:00 ext4-rsv-conver
823 ? 00:00:02 systemd-journal
861 ? 00:00:00 rpciod
880 ? 00:00:00 systemd-udevd
910 ? 00:00:00 ktpacpid
918 ? 00:00:00 irq/47-mei_me
949 ? 00:00:00 kmemstick
968 ? 00:00:00 cfg80211
984 ? 00:00:07 irq/50-iwlwifi
999 ? 00:00:00 kworker/u17:0
1001 ? 00:00:00 hci0
1002 ? 00:00:00 hci0
1007 ? 00:00:00 kworker/u17:2
1015 ? 00:00:00 kdmflush
1016 ? 00:00:00 bioset
1018 ? 00:00:00 kvm-irqfd-clean
1071 ? 00:00:00 jbd2/sda1-8
1072 ? 00:00:00 ext4-rsv-conver
1077 ? 00:00:06 jbd2/dm-3-8
1078 ? 00:00:00 ext4-rsv-conver
1105 ? 00:00:00 auditd
1119 ? 00:00:00 audispd
1123 ? 00:00:00 sedispatch
1133 ? 00:00:00 bluetoothd
1137 ? 00:00:00 mcelog
1139 ? 00:00:39 rngd
1146 ? 00:00:00 gssproxy
1154 ? 00:00:00 ModemManager
1158 ? 00:00:04 udisksd
1159 ? 00:00:00 firewalld
1169 ? 00:00:00 systemd-logind
1173 ? 00:00:00 accounts-daemon
1177 ? 00:00:00 alsactl
1249 ? 00:00:00 abrtd
1262 ? 00:00:00 iprt-VBoxWQueue
1268 ? 00:00:00 iprt-VBoxTscThr
1281 ? 00:00:00 abrt-dump-journ
1286 ? 00:00:00 abrt-dump-journ
1324 ? 00:00:12 NetworkManager
1376 ? 00:00:00 libvirtd
1393 ? 00:00:00 crond
1396 ? 00:00:00 atd
1398 ? 00:00:00 gdm
1463 ? 00:00:01 wpa_supplicant
1509 ? 00:00:00 gdm-session-wor
1582 ? 00:00:00 upowerd
1680 ? 00:00:10 packagekitd
1839 ? 00:00:00 dhclient
1903 ? 00:00:00 gdm-session-wor
1938 tty2 00:00:00 xf86-video-inte
2067 ? 00:00:00 krfcommd
2206 ? 00:00:00 cupsd
2235 ? 00:00:00 fwupd
2448 ? 00:00:00 dhclient
2633 ? 00:00:00 kworker/0:1
3387 ? 00:00:00 kworker/3:1
3927 ? 00:00:04 kworker/u16:0
5168 ? 00:00:00 kworker/1:3
13278 ? 00:00:00 kworker/u16:5
13551 ? 00:00:00 kworker/u16:1
13663 ? 00:00:00 kworker/1:1
13668 ? 00:00:00 kworker/3:0
13681 ? 00:00:00 kworker/2:2
13694 ? 00:00:00 kworker/0:3
13724 ? 00:00:00 kworker/u16:4
13765 ? 00:00:00 kworker/1:2
13781 ? 00:00:00 kworker/2:0
13794 ? 00:00:00 kworker/3:3
13799 ? 00:00:00 kworker/0:2
14835 pts/1 00:00:00 sudo
14848 pts/1 00:00:00 su
14853 pts/1 00:00:00 bash
14902 ? 00:00:00 kworker/2:3
14903 pts/1 00:00:00 python
15764 ? 00:00:00 fprintd
15897 pts/1 00:00:00 ps
16120 ? 00:00:00 kworker/2:1
18809 ? 00:00:00 ceph-msgr
18811 ? 00:00:00 rbd
28322 ? 00:00:00 systemd
28332 ? 00:00:00 (sd-pam)
28799 ? 00:00:10 kworker/u16:2
29866 ? 00:00:00 kworker/3:2
30529 ? 00:00:00 dio/dm-3
31742 ? 00:00:00 kworker/0:0
32711 ? 00:00:00 kworker/1:0
2016-07-14 21:49:40,948.948 ERROR:__main__:tried to stop a non-running daemon
2016-07-14 21:49:40,949.949 INFO:__main__:run args=['./bin/ceph', 'fs', 'ls', '--format=json-pretty']
2016-07-14 21:49:40,950.950 INFO:__main__:Running ['./bin/ceph', 'fs', 'ls', '--format=json-pretty']
2016-07-14 21:49:41,133.133 INFO:__main__:run args=['./bin/ceph', 'fs', 'set', u'cephfs', 'cluster_down', 'true']
2016-07-14 21:49:41,133.133 INFO:__main__:Running ['./bin/ceph', 'fs', 'set', u'cephfs', 'cluster_down', 'true']
2016-07-14 21:49:41,578.578 INFO:__main__:run args=['./bin/ceph', 'fs', 'get', u'cephfs', '--format=json-pretty']
2016-07-14 21:49:41,578.578 INFO:__main__:Running ['./bin/ceph', 'fs', 'get', u'cephfs', '--format=json-pretty']
2016-07-14 21:49:41,770.770 INFO:__main__:run args=['./bin/ceph', 'mds', 'fail', '4123']
2016-07-14 21:49:41,770.770 INFO:__main__:Running ['./bin/ceph', 'mds', 'fail', '4123']
2016-07-14 21:49:42,617.617 INFO:__main__:run args=['./bin/ceph', 'fs', 'rm', u'cephfs', '--yes-i-really-mean-it']
2016-07-14 21:49:42,617.617 INFO:__main__:Running ['./bin/ceph', 'fs', 'rm', u'cephfs', '--yes-i-really-mean-it']
2016-07-14 21:49:43,655.655 INFO:__main__:run args=['./bin/ceph', 'osd', 'pool', 'delete', u'cephfs_metadata', u'cephfs_metadata', '--yes-i-really-really-mean-it']
2016-07-14 21:49:43,655.655 INFO:__main__:Running ['./bin/ceph', 'osd', 'pool', 'delete', u'cephfs_metadata', u'cephfs_metadata', '--yes-i-really-really-mean-it']
2016-07-14 21:49:44,708.708 INFO:__main__:run args=['./bin/ceph', 'osd', 'pool', 'delete', u'cephfs_data', u'cephfs_data', '--yes-i-really-really-mean-it']
2016-07-14 21:49:44,708.708 INFO:__main__:Running ['./bin/ceph', 'osd', 'pool', 'delete', u'cephfs_data', u'cephfs_data', '--yes-i-really-really-mean-it']
2016-07-14 21:49:45,790.790 INFO:__main__:run args=['./bin/ceph', 'daemon', 'osd.0', 'config', 'get', 'osd_mon_report_interval_max']
2016-07-14 21:49:45,790.790 INFO:__main__:Running ['./bin/ceph', 'daemon', 'osd.0', 'config', 'get', 'osd_mon_report_interval_max']
2016-07-14 21:49:45,890.890 INFO:tasks.cephfs.filesystem:_json_asok output: {
"osd_mon_report_interval_max": "5"
}

2016-07-14 21:49:45,891.891 INFO:__main__:run args=['./bin/ceph', 'osd', 'dump', '--format=json-pretty']
2016-07-14 21:49:45,891.891 INFO:__main__:Running ['./bin/ceph', 'osd', 'dump', '--format=json-pretty']
2016-07-14 21:49:46,093.093 INFO:__main__:run args=['./bin/ceph', 'osd', 'blacklist', 'clear']
2016-07-14 21:49:46,093.093 INFO:__main__:Running ['./bin/ceph', 'osd', 'blacklist', 'clear']
2016-07-14 21:49:46,835.835 INFO:__main__:run args=['./bin/ceph', 'auth', 'caps', 'client.0', 'mds', 'allow', 'mon', 'allow r', 'osd', 'allow rw pool=cephfs_data']
2016-07-14 21:49:46,835.835 INFO:__main__:Running ['./bin/ceph', 'auth', 'caps', 'client.0', 'mds', 'allow', 'mon', 'allow r', 'osd', 'allow rw pool=cephfs_data']
2016-07-14 21:49:47,034.034 INFO:__main__:run args=['./bin/ceph', 'auth', 'caps', 'client.1', 'mds', 'allow', 'mon', 'allow r', 'osd', 'allow rw pool=cephfs_data']
2016-07-14 21:49:47,034.034 INFO:__main__:Running ['./bin/ceph', 'auth', 'caps', 'client.1', 'mds', 'allow', 'mon', 'allow r', 'osd', 'allow rw pool=cephfs_data']
2016-07-14 21:49:47,255.255 INFO:__main__:run args=['./bin/ceph', 'auth', 'caps', 'client.2', 'mds', 'allow', 'mon', 'allow r', 'osd', 'allow rw pool=cephfs_data']
2016-07-14 21:49:47,255.255 INFO:__main__:Running ['./bin/ceph', 'auth', 'caps', 'client.2', 'mds', 'allow', 'mon', 'allow r', 'osd', 'allow rw pool=cephfs_data']
2016-07-14 21:49:47,445.445 INFO:__main__:run args=['./bin/ceph', 'auth', 'caps', 'client.3', 'mds', 'allow', 'mon', 'allow r', 'osd', 'allow rw pool=cephfs_data']
2016-07-14 21:49:47,446.446 INFO:__main__:Running ['./bin/ceph', 'auth', 'caps', 'client.3', 'mds', 'allow', 'mon', 'allow r', 'osd', 'allow rw pool=cephfs_data']
2016-07-14 21:49:47,665.665 INFO:tasks.cephfs.cephfs_test_case:['0', '1', '2', '3']
2016-07-14 21:49:47,665.665 INFO:__main__:run args=['./bin/ceph', 'auth', 'list', '--format=json-pretty']
2016-07-14 21:49:47,666.666 INFO:__main__:Running ['./bin/ceph', 'auth', 'list', '--format=json-pretty']
2016-07-14 21:49:47,855.855 INFO:tasks.cephfs.filesystem:Creating filesystem 'cephfs'
2016-07-14 21:49:47,855.855 INFO:__main__:run args=['./bin/ceph', 'daemon', 'mon.a', 'config', 'get', 'mon_pg_warn_min_per_osd']
2016-07-14 21:49:47,855.855 INFO:__main__:Running ['./bin/ceph', 'daemon', 'mon.a', 'config', 'get', 'mon_pg_warn_min_per_osd']
2016-07-14 21:49:47,957.957 INFO:tasks.cephfs.filesystem:_json_asok output: {
"mon_pg_warn_min_per_osd": "3"
}

2016-07-14 21:49:47,958.958 INFO:__main__:run args=['./bin/ceph', 'osd', 'pool', 'create', 'cephfs_metadata', '9']
2016-07-14 21:49:47,958.958 INFO:__main__:Running ['./bin/ceph', 'osd', 'pool', 'create', 'cephfs_metadata', '9']
2016-07-14 21:49:48,860.860 INFO:__main__:run args=['./bin/ceph', 'osd', 'pool', 'create', 'cephfs_data', '9']
2016-07-14 21:49:48,860.860 INFO:__main__:Running ['./bin/ceph', 'osd', 'pool', 'create', 'cephfs_data', '9']
2016-07-14 21:49:49,917.917 INFO:__main__:run args=['./bin/ceph', 'fs', 'new', 'cephfs', 'cephfs_metadata', 'cephfs_data']
2016-07-14 21:49:49,918.918 INFO:__main__:Running ['./bin/ceph', 'fs', 'new', 'cephfs', 'cephfs_metadata', 'cephfs_data']
2016-07-14 21:49:50,479.479 INFO:__main__:run args=['ps', '-u0']
2016-07-14 21:49:50,479.479 INFO:__main__:Running ['ps', '-u0']
2016-07-14 21:49:50,496.496 INFO:__main__:No match for mds a: PID TTY TIME CMD
1 ? 00:00:02 systemd
2 ? 00:00:00 kthreadd
3 ? 00:00:00 ksoftirqd/0
5 ? 00:00:00 kworker/0:0H
7 ? 00:00:36 rcu_sched
8 ? 00:00:00 rcu_bh
9 ? 00:00:15 rcuos/0
10 ? 00:00:00 rcuob/0
11 ? 00:00:00 migration/0
12 ? 00:00:00 watchdog/0
13 ? 00:00:00 watchdog/1
14 ? 00:00:00 migration/1
15 ? 00:00:00 ksoftirqd/1
17 ? 00:00:00 kworker/1:0H
18 ? 00:00:06 rcuos/1
19 ? 00:00:00 rcuob/1
20 ? 00:00:00 watchdog/2
21 ? 00:00:00 migration/2
22 ? 00:00:00 ksoftirqd/2
24 ? 00:00:00 kworker/2:0H
25 ? 00:00:14 rcuos/2
26 ? 00:00:00 rcuob/2
27 ? 00:00:00 watchdog/3
28 ? 00:00:00 migration/3
29 ? 00:00:00 ksoftirqd/3
31 ? 00:00:00 kworker/3:0H
32 ? 00:00:05 rcuos/3
33 ? 00:00:00 rcuob/3
34 ? 00:00:00 kdevtmpfs
35 ? 00:00:00 netns
36 ? 00:00:00 writeback
37 ? 00:00:00 ksmd
38 ? 00:00:00 khugepaged
39 ? 00:00:00 crypto
40 ? 00:00:00 kintegrityd
41 ? 00:00:00 bioset
42 ? 00:00:00 kblockd
43 ? 00:00:00 ata_sff
44 ? 00:00:00 md
45 ? 00:00:00 devfreq_wq
51 ? 00:00:13 kswapd0
52 ? 00:00:00 vmstat
93 ? 00:00:00 kthrotld
94 ? 00:00:00 acpi_thermal_pm
95 ? 00:00:00 scsi_eh_0
96 ? 00:00:00 scsi_tmf_0
97 ? 00:00:00 scsi_eh_1
98 ? 00:00:00 scsi_tmf_1
99 ? 00:00:00 scsi_eh_2
100 ? 00:00:00 scsi_tmf_2
104 ? 00:00:00 kpsmoused
106 ? 00:00:00 dm_bufio_cache
107 ? 00:00:00 ipv6_addrconf
138 ? 00:00:00 deferwq
140 ? 00:00:00 bioset
178 ? 00:00:00 kauditd
389 ? 00:00:01 kworker/0:1H
399 ? 00:00:00 kworker/1:1H
400 ? 00:00:00 kworker/3:1H
420 ? 00:00:00 rtsx_pci_sdmmc_
425 ? 00:00:00 kworker/2:1H
613 ? 00:00:00 kdmflush
621 ? 00:00:00 bioset
622 ? 00:00:00 kcryptd_io
623 ? 00:00:00 kcryptd
624 ? 00:00:08 dmcrypt_write
625 ? 00:00:00 bioset
688 ? 00:00:00 kdmflush
691 ? 00:00:00 bioset
700 ? 00:00:00 kdmflush
701 ? 00:00:00 bioset
724 ? 00:00:00 jbd2/dm-1-8
725 ? 00:00:00 ext4-rsv-conver
823 ? 00:00:02 systemd-journal
861 ? 00:00:00 rpciod
880 ? 00:00:00 systemd-udevd
910 ? 00:00:00 ktpacpid
918 ? 00:00:00 irq/47-mei_me
949 ? 00:00:00 kmemstick
968 ? 00:00:00 cfg80211
984 ? 00:00:07 irq/50-iwlwifi
999 ? 00:00:00 kworker/u17:0
1001 ? 00:00:00 hci0
1002 ? 00:00:00 hci0
1007 ? 00:00:00 kworker/u17:2
1015 ? 00:00:00 kdmflush
1016 ? 00:00:00 bioset
1018 ? 00:00:00 kvm-irqfd-clean
1071 ? 00:00:00 jbd2/sda1-8
1072 ? 00:00:00 ext4-rsv-conver
1077 ? 00:00:06 jbd2/dm-3-8
1078 ? 00:00:00 ext4-rsv-conver
1105 ? 00:00:00 auditd
1119 ? 00:00:00 audispd
1123 ? 00:00:00 sedispatch
1133 ? 00:00:00 bluetoothd
1137 ? 00:00:00 mcelog
1139 ? 00:00:39 rngd
1146 ? 00:00:00 gssproxy
1154 ? 00:00:00 ModemManager
1158 ? 00:00:04 udisksd
1159 ? 00:00:00 firewalld
1169 ? 00:00:00 systemd-logind
1173 ? 00:00:00 accounts-daemon
1177 ? 00:00:00 alsactl
1249 ? 00:00:00 abrtd
1262 ? 00:00:00 iprt-VBoxWQueue
1268 ? 00:00:00 iprt-VBoxTscThr
1281 ? 00:00:00 abrt-dump-journ
1286 ? 00:00:00 abrt-dump-journ
1324 ? 00:00:12 NetworkManager
1376 ? 00:00:00 libvirtd
1393 ? 00:00:00 crond
1396 ? 00:00:00 atd
1398 ? 00:00:00 gdm
1463 ? 00:00:01 wpa_supplicant
1509 ? 00:00:00 gdm-session-wor
1582 ? 00:00:00 upowerd
1680 ? 00:00:10 packagekitd
1839 ? 00:00:00 dhclient
1903 ? 00:00:00 gdm-session-wor
1938 tty2 00:00:00 xf86-video-inte
2067 ? 00:00:00 krfcommd
2206 ? 00:00:00 cupsd
2235 ? 00:00:00 fwupd
2448 ? 00:00:00 dhclient
2633 ? 00:00:00 kworker/0:1
3387 ? 00:00:00 kworker/3:1
3927 ? 00:00:04 kworker/u16:0
5168 ? 00:00:00 kworker/1:3
13278 ? 00:00:00 kworker/u16:5
13551 ? 00:00:00 kworker/u16:1
13663 ? 00:00:00 kworker/1:1
13668 ? 00:00:00 kworker/3:0
13681 ? 00:00:00 kworker/2:2
13694 ? 00:00:00 kworker/0:3
13724 ? 00:00:00 kworker/u16:4
13765 ? 00:00:00 kworker/1:2
13781 ? 00:00:00 kworker/2:0
13794 ? 00:00:00 kworker/3:3
13799 ? 00:00:00 kworker/0:2
14835 pts/1 00:00:00 sudo
14848 pts/1 00:00:00 su
14853 pts/1 00:00:00 bash
14902 ? 00:00:00 kworker/2:3
14903 pts/1 00:00:00 python
15764 ? 00:00:00 fprintd
16120 ? 00:00:00 kworker/2:1
16419 pts/1 00:00:00 ps
18809 ? 00:00:00 ceph-msgr
18811 ? 00:00:00 rbd
28322 ? 00:00:00 systemd
28332 ? 00:00:00 (sd-pam)
28799 ? 00:00:10 kworker/u16:2
29866 ? 00:00:00 kworker/3:2
30529 ? 00:00:00 dio/dm-3
31742 ? 00:00:00 kworker/0:0
32711 ? 00:00:00 kworker/1:0
2016-07-14 21:49:50,497.497 INFO:__main__:run args=['./bin/./ceph-mds', '-i', 'a']
2016-07-14 21:49:50,497.497 INFO:__main__:Running ['./bin/./ceph-mds', '-i', 'a']
2016-07-14 21:49:50,532.532 INFO:__main__:run args=['./bin/ceph', 'fs', 'get', 'cephfs', '--format=json-pretty']
2016-07-14 21:49:50,533.533 INFO:__main__:Running ['./bin/ceph', 'fs', 'get', 'cephfs', '--format=json-pretty']
2016-07-14 21:49:50,736.736 INFO:tasks.cephfs.filesystem:are_daemons_healthy: mds map: {u'session_autoclose': 300, u'up': {u'mds_0': 4147}, u'last_failure_osd_epoch': 0, u'in': [0], u'last_failure': 0, u'max_file_size': 1099511627776, u'explicitly_allowed_features': 0, u'damaged': [], u'tableserver': 0, u'metadata_pool': 5, u'failed': [], u'epoch': 15, u'flags': 0, u'max_mds': 1, u'compat': {u'compat': {}, u'ro_compat': {}, u'incompat': {u'feature_8': u'file layout v2', u'feature_2': u'client writeable ranges', u'feature_3': u'default file layouts on dirs', u'feature_1': u'base v0.20', u'feature_6': u'dirfrag is stored in omap', u'feature_4': u'dir inode in separate object', u'feature_5': u'mds uses versioned encoding'}}, u'data_pools': [6], u'info': {u'gid_4147': {u'standby_for_rank': -1, u'export_targets': [], u'name': u'a', u'incarnation': 15, u'standby_replay': False, u'state_seq': 1, u'standby_for_fscid': -1, u'state': u'up:creating', u'gid': 4147, u'features': 576460752032890879, u'rank': 0, u'standby_for_name': u'', u'addr': u'10.70.1.141:6812/15180'}}, u'fs_name': u'cephfs', u'created': u'2016-07-14 21:49:50.110782', u'enabled': True, u'modified': u'2016-07-14 21:49:50.110782', u'session_timeout': 60, u'stopped': [], u'ever_allowed_features': 0, u'root': 0}
2016-07-14 21:49:50,736.736 WARNING:tasks.cephfs.filesystem:Unhealthy mds state gid_4147:up:creating
2016-07-14 21:49:51,736.736 INFO:__main__:run args=['./bin/ceph', 'fs', 'get', 'cephfs', '--format=json-pretty']
2016-07-14 21:49:51,737.737 INFO:__main__:Running ['./bin/ceph', 'fs', 'get', 'cephfs', '--format=json-pretty']
2016-07-14 21:49:51,921.921 INFO:tasks.cephfs.filesystem:are_daemons_healthy: mds map: {u'session_autoclose': 300, u'up': {u'mds_0': 4147}, u'last_failure_osd_epoch': 0, u'in': [0], u'last_failure': 0, u'max_file_size': 1099511627776, u'explicitly_allowed_features': 0, u'damaged': [], u'tableserver': 0, u'metadata_pool': 5, u'failed': [], u'epoch': 15, u'flags': 0, u'max_mds': 1, u'compat': {u'compat': {}, u'ro_compat': {}, u'incompat': {u'feature_8': u'file layout v2', u'feature_2': u'client writeable ranges', u'feature_3': u'default file layouts on dirs', u'feature_1': u'base v0.20', u'feature_6': u'dirfrag is stored in omap', u'feature_4': u'dir inode in separate object', u'feature_5': u'mds uses versioned encoding'}}, u'data_pools': [6], u'info': {u'gid_4147': {u'standby_for_rank': -1, u'export_targets': [], u'name': u'a', u'incarnation': 15, u'standby_replay': False, u'state_seq': 1, u'standby_for_fscid': -1, u'state': u'up:creating', u'gid': 4147, u'features': 576460752032890879, u'rank': 0, u'standby_for_name': u'', u'addr': u'10.70.1.141:6812/15180'}}, u'fs_name': u'cephfs', u'created': u'2016-07-14 21:49:50.110782', u'enabled': True, u'modified': u'2016-07-14 21:49:50.110782', u'session_timeout': 60, u'stopped': [], u'ever_allowed_features': 0, u'root': 0}
2016-07-14 21:49:51,921.921 WARNING:tasks.cephfs.filesystem:Unhealthy mds state gid_4147:up:creating
2016-07-14 21:49:52,922.922 INFO:__main__:run args=['./bin/ceph', 'fs', 'get', 'cephfs', '--format=json-pretty']
2016-07-14 21:49:52,922.922 INFO:__main__:Running ['./bin/ceph', 'fs', 'get', 'cephfs', '--format=json-pretty']
2016-07-14 21:49:53,106.106 INFO:tasks.cephfs.filesystem:are_daemons_healthy: mds map: {u'session_autoclose': 300, u'up': {u'mds_0': 4147}, u'last_failure_osd_epoch': 0, u'in': [0], u'last_failure': 0, u'max_file_size': 1099511627776, u'explicitly_allowed_features': 0, u'damaged': [], u'tableserver': 0, u'metadata_pool': 5, u'failed': [], u'epoch': 16, u'flags': 0, u'max_mds': 1, u'compat': {u'compat': {}, u'ro_compat': {}, u'incompat': {u'feature_8': u'file layout v2', u'feature_2': u'client writeable ranges', u'feature_3': u'default file layouts on dirs', u'feature_1': u'base v0.20', u'feature_6': u'dirfrag is stored in omap', u'feature_4': u'dir inode in separate object', u'feature_5': u'mds uses versioned encoding'}}, u'data_pools': [6], u'info': {u'gid_4147': {u'standby_for_rank': -1, u'export_targets': [], u'name': u'a', u'incarnation': 15, u'standby_replay': False, u'state_seq': 4, u'standby_for_fscid': -1, u'state': u'up:active', u'gid': 4147, u'features': 576460752032890879, u'rank': 0, u'standby_for_name': u'', u'addr': u'10.70.1.141:6812/15180'}}, u'fs_name': u'cephfs', u'created': u'2016-07-14 21:49:50.110782', u'enabled': True, u'modified': u'2016-07-14 21:49:50.110782', u'session_timeout': 60, u'stopped': [], u'ever_allowed_features': 0, u'root': 0}
2016-07-14 21:49:53,106.106 INFO:tasks.cephfs.filesystem:are_daemons_healthy: 1/1
2016-07-14 21:49:53,106.106 INFO:__main__:run args=['./bin/ceph', 'daemon', 'mds.a', 'status']
2016-07-14 21:49:53,107.107 INFO:__main__:Running ['./bin/ceph', 'daemon', 'mds.a', 'status']
2016-07-14 21:49:53,223.223 INFO:tasks.cephfs.filesystem:_json_asok output: {
"cluster_fsid": "ae66e242-d103-408a-b4cf-f160f95d365c",
"whoami": 0,
"want_state": "up:active",
"state": "up:active",
"mdsmap_epoch": 16,
"osdmap_epoch": 24,
"osdmap_epoch_barrier": 24
}

2016-07-14 21:49:53,224.224 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:53,224.224 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.0': No such file or directory
2016-07-14 21:49:53,237.237 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.0
2016-07-14 21:49:53,237.237 INFO:__main__:run args=['mkdir', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:53,238.238 INFO:__main__:Running ['mkdir', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:53,247.247 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:53,247.247 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:53,263.263 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:53,264.264 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:53,275.275 INFO:__main__:Pre-mount connections: [43, 47]
2016-07-14 21:49:53,275.275 INFO:__main__:run args=['./bin/ceph-fuse', '-f', '--name', 'client.0', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:53,275.275 INFO:__main__:Running ['./bin/ceph-fuse', '-f', '--name', 'client.0', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:53,282.282 INFO:__main__:Mounting client.0 with pid 16513
2016-07-14 21:49:53,283.283 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:53,283.283 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:53,297.297 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:53,297.297 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:54,312.312 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:54,313.313 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:54,321.321 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:54,321.321 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:54,332.332 INFO:__main__:Post-mount connections: [43, 47, 48]
2016-07-14 21:49:54,333.333 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:54,334.334 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:54,343.343 INFO:tasks.cephfs.fuse_mount:ceph-fuse is mounted on /tmp/tmp0EnRmD/mnt.0
2016-07-14 21:49:54,343.343 INFO:__main__:run args=['sudo', 'chmod', '1777', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:54,344.344 INFO:__main__:Running ['chmod', '1777', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:54,357.357 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:54,357.357 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.1': No such file or directory
2016-07-14 21:49:54,364.364 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.1
2016-07-14 21:49:54,364.364 INFO:__main__:run args=['mkdir', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:54,365.365 INFO:__main__:Running ['mkdir', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:54,372.372 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:54,373.373 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:54,388.388 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:54,388.388 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:54,396.396 INFO:__main__:Pre-mount connections: [43, 47, 48]
2016-07-14 21:49:54,396.396 INFO:__main__:run args=['./bin/ceph-fuse', '-f', '--name', 'client.1', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:54,396.396 INFO:__main__:Running ['./bin/ceph-fuse', '-f', '--name', 'client.1', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:54,408.408 INFO:__main__:Mounting client.1 with pid 16555
2016-07-14 21:49:54,410.410 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:54,411.411 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:54,426.426 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:54,427.427 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:55,440.440 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:55,440.440 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:55,450.450 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:55,452.452 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:55,461.461 INFO:__main__:Post-mount connections: [43, 47, 48, 49]
2016-07-14 21:49:55,461.461 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:55,461.461 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:55,476.476 INFO:tasks.cephfs.fuse_mount:ceph-fuse is mounted on /tmp/tmp0EnRmD/mnt.1
2016-07-14 21:49:55,476.476 INFO:__main__:run args=['sudo', 'chmod', '1777', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:55,476.476 INFO:__main__:Running ['chmod', '1777', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:55,487.487 INFO:__main__:Searching for existing instance mon pg warn max per osd/global
2016-07-14 21:49:55,488.488 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:55,489.489 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:55,497.497 INFO:tasks.cephfs.fuse_mount:ceph-fuse is mounted on /tmp/tmp0EnRmD/mnt.1
2016-07-14 21:49:55,497.497 INFO:tasks.cephfs.fuse_mount:Running fusermount -u on local...
2016-07-14 21:49:55,498.498 INFO:__main__:run args=['sudo', 'fusermount', '-u', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:55,498.498 INFO:__main__:Running ['fusermount', '-u', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:55,524.524 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:55,524.524 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:55,534.534 INFO:teuthology.orchestra.run:waiting for 900
2016-07-14 21:50:01,538.538 INFO:__main__:run args=['rmdir', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:50:01,539.539 INFO:__main__:Running ['rmdir', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:50:01,546.546 INFO:__main__:run args=['./bin/ceph', 'auth', 'get-or-create', 'client.manila', 'mds', 'allow *', 'osd', 'allow rw', 'mon', 'allow *']
2016-07-14 21:50:01,546.546 INFO:__main__:Running ['./bin/ceph', 'auth', 'get-or-create', 'client.manila', 'mds', 'allow *', 'osd', 'allow rw', 'mon', 'allow *']
2016-07-14 21:50:01,739.739 INFO:__main__:run args=['sudo', 'python', '-c', 'import shutil, sys; shutil.copyfileobj(sys.stdin, file(sys.argv[1], "wb"))', '/home/rraja/git/ceph/build/client.manila.keyring']
2016-07-14 21:50:01,739.739 INFO:__main__:Running ['python', '-c', 'import shutil, sys; shutil.copyfileobj(sys.stdin, file(sys.argv[1], "wb"))', '/home/rraja/git/ceph/build/client.manila.keyring']
2016-07-14 21:50:01,763.763 INFO:__main__:Searching for existing instance mon pg warn max per osd/global
2016-07-14 21:50:01,763.763 INFO:__main__:Searching for existing instance keyring/client.manila
2016-07-14 21:50:01,764.764 INFO:__main__:run args=['./bin/ceph', 'osd', 'dump', '--format=json-pretty']
2016-07-14 21:50:01,764.764 INFO:__main__:Running ['./bin/ceph', 'osd', 'dump', '--format=json-pretty']
2016-07-14 21:50:01,940.940 INFO:__main__:run args=['./bin/ceph', 'daemon', 'mon.a', 'config', 'get', 'mon_pg_warn_max_per_osd']
2016-07-14 21:50:01,940.940 INFO:__main__:Running ['./bin/ceph', 'daemon', 'mon.a', 'config', 'get', 'mon_pg_warn_max_per_osd']
2016-07-14 21:50:02,048.048 INFO:tasks.cephfs.filesystem:_json_asok output: {
"mon_pg_warn_max_per_osd": "300"
}

2016-07-14 21:50:02,048.048 INFO:tasks.cephfs.test_volume_client:max_per_osd 300
2016-07-14 21:50:02,048.048 INFO:tasks.cephfs.test_volume_client:osd_count 3
2016-07-14 21:50:02,048.048 INFO:tasks.cephfs.test_volume_client:max_overall 900
2016-07-14 21:50:02,049.049 INFO:tasks.cephfs.test_volume_client:existing_pg_count 26
2016-07-14 21:50:02,049.049 INFO:tasks.cephfs.test_volume_client:expected_pg_num 87
2016-07-14 21:50:02,049.049 INFO:__main__:run args=['./bin/ceph', 'osd', 'dump', '--format=json-pretty']
2016-07-14 21:50:02,049.049 INFO:__main__:Running ['./bin/ceph', 'osd', 'dump', '--format=json-pretty']
2016-07-14 21:50:02,234.234 INFO:__main__:run args=['python', '-c', '\nfrom ceph_volume_client import CephFSVolumeClient, VolumePath\nimport logging\nlog = logging.getLogger("ceph_volume_client")\nlog.addHandler(logging.StreamHandler())\nlog.setLevel(logging.DEBUG)\nvc = CephFSVolumeClient("manila", "./ceph.conf", "ceph", None, None)\nvc.connect()\n\nvp = VolumePath("grpid", "volid")\nvc.create_volume(vp, 10, data_isolated=True)\n\nvc.disconnect()\n ']
2016-07-14 21:50:02,235.235 INFO:__main__:Running ['python', '-c', '\nfrom ceph_volume_client import CephFSVolumeClient, VolumePath\nimport logging\nlog = logging.getLogger("ceph_volume_client")\nlog.addHandler(logging.StreamHandler())\nlog.setLevel(logging.DEBUG)\nvc = CephFSVolumeClient("manila", "./ceph.conf", "ceph", None, None)\nvc.connect()\n\nvp = VolumePath("grpid", "volid")\nvc.create_volume(vp, 10, data_isolated=True)\n\nvc.disconnect()\n ']
src/tcmalloc.cc:283] Attempt to free invalid pointer 0x561bc63d3940
2016-07-14 21:50:02,503.503 INFO:__main__:test_data_isolated (tasks.cephfs.test_volume_client.TestVolumeClient) ... ERROR
2016-07-14 21:50:02,504.504 ERROR:__main__:Traceback (most recent call last):
File "/home/rraja/git/ceph-qa-suite/tasks/cephfs/test_volume_client.py", line 370, in test_data_isolated
guest_entity=guest_entity
File "/home/rraja/git/ceph-qa-suite/tasks/cephfs/test_volume_client.py", line 34, in _volume_client_python
""".format(payload=script, conf_path=client.config_path, vol_prefix=vol_prefix, ns_prefix=ns_prefix))
File "/home/rraja/git/ceph-qa-suite/tasks/cephfs/mount.py", line 134, in run_python
p.wait()
File "/home/rraja/git/ceph-qa-suite/tasks/cephfs/vstart_runner.py", line 113, in wait
raise CommandFailedError(self.args, self.exitstatus)
CommandFailedError: Command failed with status -6: ['python', '-c', '\nfrom ceph_volume_client import CephFSVolumeClient, VolumePath\nimport logging\nlog = logging.getLogger("ceph_volume_client")\nlog.addHandler(logging.StreamHandler())\nlog.setLevel(logging.DEBUG)\nvc = CephFSVolumeClient("manila", "./ceph.conf", "ceph", None, None)\nvc.connect()\n\nvp = VolumePath("grpid", "volid")\nvc.create_volume(vp, 10, data_isolated=True)\n\nvc.disconnect()\n ']

2016-07-14 21:50:02,504.504 ERROR:__main__:Error in test 'test_data_isolated (tasks.cephfs.test_volume_client.TestVolumeClient)', going interactive
Ceph test interactive mode, use ctx to interact with the cluster, press control-D to exit...
>>>
2016-07-14 21:56:08,890.890 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:56:08,890.890 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:56:08,901.901 INFO:tasks.cephfs.fuse_mount:ceph-fuse is mounted on /tmp/tmp0EnRmD/mnt.0
2016-07-14 21:56:08,901.901 INFO:tasks.cephfs.fuse_mount:Running fusermount -u on local...
2016-07-14 21:56:08,901.901 INFO:__main__:run args=['sudo', 'fusermount', '-u', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:56:08,901.901 INFO:__main__:Running ['fusermount', '-u', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:56:08,920.920 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:56:08,920.920 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:56:08,932.932 INFO:__main__:kill
2016-07-14 21:56:08,932.932 INFO:__main__:kill: killing pid 16513 (['./bin/ceph-fuse', '-f', '--name', 'client.0', '/tmp/tmp0EnRmD/mnt.0'])
ceph-fuse[16513]: starting ceph client
2016-07-14 21:49:53.297548 7facfb952f40 -1 WARNING: the following dangerous and experimental features are enabled: *
2016-07-14 21:49:53.297636 7facfb952f40 -1 WARNING: the following dangerous and experimental features are enabled: *
2016-07-14 21:49:53.303164 7facfb952f40 -1 WARNING: the following dangerous and experimental features are enabled: *
2016-07-14 21:49:53.303889 7facfb952f40 -1 init, newargv = 0xa149080 newargc=11
ceph-fuse[16513]: starting fuse
ceph-fuse[16513]: fuse finished with error 0 and tester_r 0
2016-07-14 21:56:08,936.936 INFO:__main__:run args=['rm', '-rf', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:56:08,936.936 INFO:__main__:Running ['rm', '-rf', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:56:08,945.945 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:56:08,945.945 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.1': No such file or directory
2016-07-14 21:56:08,956.956 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.1
2016-07-14 21:56:08,956.956 INFO:__main__:run args=['rm', '-rf', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:56:08,956.956 INFO:__main__:Running ['rm', '-rf', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:56:08,968.968 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.2']
2016-07-14 21:56:08,969.969 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.2']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.2': No such file or directory
2016-07-14 21:56:08,976.976 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.2
2016-07-14 21:56:08,977.977 INFO:__main__:run args=['rm', '-rf', '/tmp/tmp0EnRmD/mnt.2']
2016-07-14 21:56:08,977.977 INFO:__main__:Running ['rm', '-rf', '/tmp/tmp0EnRmD/mnt.2']
2016-07-14 21:56:08,985.985 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.3']
2016-07-14 21:56:08,985.985 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.3']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.3': No such file or directory
2016-07-14 21:56:08,995.995 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.3
2016-07-14 21:56:08,995.995 INFO:__main__:run args=['rm', '-rf', '/tmp/tmp0EnRmD/mnt.3']
2016-07-14 21:56:08,995.995 INFO:__main__:Running ['rm', '-rf', '/tmp/tmp0EnRmD/mnt.3']
2016-07-14 21:56:09,006.006 INFO:__main__:Searching for existing instance mon pg warn max per osd/global
2016-07-14 21:56:09,007.007 INFO:__main__:Stopped test: test_data_isolated (tasks.cephfs.test_volume_client.TestVolumeClient) in 388.115495s
2016-07-14 21:56:09,007.007 INFO:__main__:
2016-07-14 21:56:09,007.007 INFO:__main__:======================================================================
2016-07-14 21:56:09,007.007 INFO:__main__:ERROR: test_data_isolated (tasks.cephfs.test_volume_client.TestVolumeClient)
2016-07-14 21:56:09,007.007 INFO:__main__:----------------------------------------------------------------------
2016-07-14 21:56:09,007.007 INFO:__main__:Traceback (most recent call last):
2016-07-14 21:56:09,007.007 INFO:__main__: File "/home/rraja/git/ceph-qa-suite/tasks/cephfs/test_volume_client.py", line 370, in test_data_isolated
2016-07-14 21:56:09,007.007 INFO:__main__: guest_entity=guest_entity
2016-07-14 21:56:09,007.007 INFO:__main__: File "/home/rraja/git/ceph-qa-suite/tasks/cephfs/test_volume_client.py", line 34, in _volume_client_python
2016-07-14 21:56:09,008.008 INFO:__main__: """.format(payload=script, conf_path=client.config_path, vol_prefix=vol_prefix, ns_prefix=ns_prefix))
2016-07-14 21:56:09,008.008 INFO:__main__: File "/home/rraja/git/ceph-qa-suite/tasks/cephfs/mount.py", line 134, in run_python
2016-07-14 21:56:09,008.008 INFO:__main__: p.wait()
2016-07-14 21:56:09,008.008 INFO:__main__: File "/home/rraja/git/ceph-qa-suite/tasks/cephfs/vstart_runner.py", line 113, in wait
2016-07-14 21:56:09,008.008 INFO:__main__: raise CommandFailedError(self.args, self.exitstatus)
2016-07-14 21:56:09,008.008 INFO:__main__:CommandFailedError: Command failed with status -6: ['python', '-c', '\nfrom ceph_volume_client import CephFSVolumeClient, VolumePath\nimport logging\nlog = logging.getLogger("ceph_volume_client")\nlog.addHandler(logging.StreamHandler())\nlog.setLevel(logging.DEBUG)\nvc = CephFSVolumeClient("manila", "./ceph.conf", "ceph", None, None)\nvc.connect()\n\nvp = VolumePath("grpid", "volid")\nvc.create_volume(vp, 10, data_isolated=True)\n\nvc.disconnect()\n ']
2016-07-14 21:56:09,008.008 INFO:__main__:
2016-07-14 21:56:09,008.008 INFO:__main__:----------------------------------------------------------------------
2016-07-14 21:56:09,008.008 INFO:__main__:Ran 2 tests in 422.808s
2016-07-14 21:56:09,008.008 INFO:__main__:
2016-07-14 21:56:09,008.008 INFO:__main__:FAILED (errors=1)
2016-07-14 21:56:09,008.008 INFO:__main__:
2016-07-14 21:56:09,008.008 INFO:__main__:======================================================================
2016-07-14 21:56:09,008.008 INFO:__main__:ERROR: test_data_isolated (tasks.cephfs.test_volume_client.TestVolumeClient)
2016-07-14 21:56:09,009.009 INFO:__main__:----------------------------------------------------------------------
2016-07-14 21:56:09,009.009 INFO:__main__:Traceback (most recent call last):
2016-07-14 21:56:09,009.009 INFO:__main__: File "/home/rraja/git/ceph-qa-suite/tasks/cephfs/test_volume_client.py", line 370, in test_data_isolated
2016-07-14 21:56:09,009.009 INFO:__main__: guest_entity=guest_entity
2016-07-14 21:56:09,009.009 INFO:__main__: File "/home/rraja/git/ceph-qa-suite/tasks/cephfs/test_volume_client.py", line 34, in _volume_client_python
2016-07-14 21:56:09,009.009 INFO:__main__: """.format(payload=script, conf_path=client.config_path, vol_prefix=vol_prefix, ns_prefix=ns_prefix))
2016-07-14 21:56:09,009.009 INFO:__main__: File "/home/rraja/git/ceph-qa-suite/tasks/cephfs/mount.py", line 134, in run_python
2016-07-14 21:56:09,009.009 INFO:__main__: p.wait()
2016-07-14 21:56:09,009.009 INFO:__main__: File "/home/rraja/git/ceph-qa-suite/tasks/cephfs/vstart_runner.py", line 113, in wait
2016-07-14 21:56:09,009.009 INFO:__main__: raise CommandFailedError(self.args, self.exitstatus)
2016-07-14 21:56:09,009.009 INFO:__main__:CommandFailedError: Command failed with status -6: ['python', '-c', '\nfrom ceph_volume_client import CephFSVolumeClient, VolumePath\nimport logging\nlog = logging.getLogger("ceph_volume_client")\nlog.addHandler(logging.StreamHandler())\nlog.setLevel(logging.DEBUG)\nvc = CephFSVolumeClient("manila", "./ceph.conf", "ceph", None, None)\nvc.connect()\n\nvp = VolumePath("grpid", "volid")\nvc.create_volume(vp, 10, data_isolated=True)\n\nvc.disconnect()\n ']
2016-07-14 21:56:09,009.009 INFO:__main__:
[root@bzn build]# LD_LIBRARY_PATH=/home/rraja/git/ceph/build/lib PYTHONPATH=/home/rraja/git/teuthology/:/home/rraja/git/ceph-qa-suite/:/home/rraja/git/ceph/src/pybind:/home/rraja/git/ceph/build/lib/cython_modules/lib.linux-x86_64-2.7/ python /home/rraja/git/ceph-qa-suite/tasks/cephfs/vstart_runner.py --interactive tasks.cephfs.test_volume_client.TestVolumeClient
2016-07-14 21:49:05,160.160 INFO:__main__:run args=['ps', '-u0']
2016-07-14 21:49:05,160.160 INFO:__main__:Running ['ps', '-u0']
2016-07-14 21:49:05,175.175 WARNING:__main__:Killing stray process 3133 ? 00:00:03 ceph-mds
2016-07-14 21:49:05,178.178 INFO:__main__:run args=['./bin/ceph', 'auth', 'get-or-create', 'client.0', 'osd', 'allow rw', 'mds', 'allow', 'mon', 'allow r']
2016-07-14 21:49:05,178.178 INFO:__main__:Running ['./bin/ceph', 'auth', 'get-or-create', 'client.0', 'osd', 'allow rw', 'mds', 'allow', 'mon', 'allow r']
2016-07-14 21:49:05,392.392 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:05,392.392 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.0': No such file or directory
2016-07-14 21:49:05,400.400 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.0
2016-07-14 21:49:05,401.401 INFO:__main__:run args=['./bin/ceph', 'auth', 'get-or-create', 'client.1', 'osd', 'allow rw', 'mds', 'allow', 'mon', 'allow r']
2016-07-14 21:49:05,401.401 INFO:__main__:Running ['./bin/ceph', 'auth', 'get-or-create', 'client.1', 'osd', 'allow rw', 'mds', 'allow', 'mon', 'allow r']
2016-07-14 21:49:05,601.601 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:05,601.601 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.1': No such file or directory
2016-07-14 21:49:05,608.608 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.1
2016-07-14 21:49:05,609.609 INFO:__main__:run args=['./bin/ceph', 'auth', 'get-or-create', 'client.2', 'osd', 'allow rw', 'mds', 'allow', 'mon', 'allow r']
2016-07-14 21:49:05,609.609 INFO:__main__:Running ['./bin/ceph', 'auth', 'get-or-create', 'client.2', 'osd', 'allow rw', 'mds', 'allow', 'mon', 'allow r']
2016-07-14 21:49:05,825.825 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.2']
2016-07-14 21:49:05,825.825 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.2']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.2': No such file or directory
2016-07-14 21:49:05,833.833 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.2
2016-07-14 21:49:05,833.833 INFO:__main__:run args=['./bin/ceph', 'auth', 'get-or-create', 'client.3', 'osd', 'allow rw', 'mds', 'allow', 'mon', 'allow r']
2016-07-14 21:49:05,834.834 INFO:__main__:Running ['./bin/ceph', 'auth', 'get-or-create', 'client.3', 'osd', 'allow rw', 'mds', 'allow', 'mon', 'allow r']
2016-07-14 21:49:06,038.038 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.3']
2016-07-14 21:49:06,038.038 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.3']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.3': No such file or directory
2016-07-14 21:49:06,046.046 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.3
2016-07-14 21:49:06,046.046 INFO:__main__:Discovered MDS IDs: ['a']
2016-07-14 21:49:06,048.048 INFO:__main__:run args=['./bin/ceph', 'tell', 'osd.*', 'injectargs', '--osd-mon-report-interval-max', '5']
2016-07-14 21:49:06,048.048 INFO:__main__:Running ['./bin/ceph', 'tell', 'osd.*', 'injectargs', '--osd-mon-report-interval-max', '5']
2016-07-14 21:49:06,188.188 INFO:__main__:Searching for existing instance osd_mon_report_interval_max/osd
2016-07-14 21:49:06,190.190 INFO:__main__:Searching for existing instance mds log max segments/mds
2016-07-14 21:49:06,190.190 INFO:__main__:Found string to replace at 1722
2016-07-14 21:49:06,190.190 INFO:__main__:Searching for existing instance osd_mon_report_interval_max/osd
2016-07-14 21:49:06,191.191 INFO:__main__:Searching for existing instance mds log max segments/mds
2016-07-14 21:49:06,192.192 INFO:__main__:Searching for existing instance mds root ino uid/global
2016-07-14 21:49:06,192.192 INFO:__main__:Searching for existing instance osd_mon_report_interval_max/osd
2016-07-14 21:49:06,193.193 INFO:__main__:Searching for existing instance mds log max segments/mds
2016-07-14 21:49:06,193.193 INFO:__main__:Searching for existing instance mds root ino uid/global
2016-07-14 21:49:06,193.193 INFO:__main__:Searching for existing instance mds root ino gid/global
2016-07-14 21:49:06,193.193 INFO:__main__:Searching for existing instance osd_mon_report_interval_max/osd
2016-07-14 21:49:06,194.194 INFO:__main__:Executing modules: ['tasks.cephfs.test_volume_client.TestVolumeClient']
2016-07-14 21:49:06,198.198 INFO:__main__:Loaded: [<unittest.suite.TestSuite tests=[<tasks.cephfs.test_volume_client.TestVolumeClient testMethod=test_15303>, <tasks.cephfs.test_volume_client.TestVolumeClient testMethod=test_data_isolated>, <tasks.cephfs.test_volume_client.TestVolumeClient testMethod=test_default_prefix>, <tasks.cephfs.test_volume_client.TestVolumeClient testMethod=test_evict_client>, <tasks.cephfs.test_volume_client.TestVolumeClient testMethod=test_idempotency>, <tasks.cephfs.test_volume_client.TestVolumeClient testMethod=test_lifecycle>, <tasks.cephfs.test_volume_client.TestVolumeClient testMethod=test_purge>, <tasks.cephfs.test_volume_client.TestVolumeClient testMethod=test_readonly_authorization>]>]
2016-07-14 21:49:06,198.198 INFO:__main__:Disabling 0 tests because of is_for_teuthology or needs_trimming
2016-07-14 21:49:06,199.199 INFO:__main__:Starting test: test_15303 (tasks.cephfs.test_volume_client.TestVolumeClient)
2016-07-14 21:49:06,199.199 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:06,199.199 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.0': No such file or directory
2016-07-14 21:49:06,207.207 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.0
2016-07-14 21:49:06,207.207 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:06,207.207 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.1': No such file or directory
2016-07-14 21:49:06,216.216 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.1
2016-07-14 21:49:06,217.217 INFO:__main__:run args=['ps', '-u0']
2016-07-14 21:49:06,217.217 INFO:__main__:Running ['ps', '-u0']
2016-07-14 21:49:06,239.239 INFO:__main__:No match for mds a: PID TTY TIME CMD
1 ? 00:00:02 systemd
2 ? 00:00:00 kthreadd
3 ? 00:00:00 ksoftirqd/0
5 ? 00:00:00 kworker/0:0H
7 ? 00:00:36 rcu_sched
8 ? 00:00:00 rcu_bh
9 ? 00:00:15 rcuos/0
10 ? 00:00:00 rcuob/0
11 ? 00:00:00 migration/0
12 ? 00:00:00 watchdog/0
13 ? 00:00:00 watchdog/1
14 ? 00:00:00 migration/1
15 ? 00:00:00 ksoftirqd/1
17 ? 00:00:00 kworker/1:0H
18 ? 00:00:06 rcuos/1
19 ? 00:00:00 rcuob/1
20 ? 00:00:00 watchdog/2
21 ? 00:00:00 migration/2
22 ? 00:00:00 ksoftirqd/2
24 ? 00:00:00 kworker/2:0H
25 ? 00:00:14 rcuos/2
26 ? 00:00:00 rcuob/2
27 ? 00:00:00 watchdog/3
28 ? 00:00:00 migration/3
29 ? 00:00:00 ksoftirqd/3
31 ? 00:00:00 kworker/3:0H
32 ? 00:00:05 rcuos/3
33 ? 00:00:00 rcuob/3
34 ? 00:00:00 kdevtmpfs
35 ? 00:00:00 netns
36 ? 00:00:00 writeback
37 ? 00:00:00 ksmd
38 ? 00:00:00 khugepaged
39 ? 00:00:00 crypto
40 ? 00:00:00 kintegrityd
41 ? 00:00:00 bioset
42 ? 00:00:00 kblockd
43 ? 00:00:00 ata_sff
44 ? 00:00:00 md
45 ? 00:00:00 devfreq_wq
51 ? 00:00:13 kswapd0
52 ? 00:00:00 vmstat
93 ? 00:00:00 kthrotld
94 ? 00:00:00 acpi_thermal_pm
95 ? 00:00:00 scsi_eh_0
96 ? 00:00:00 scsi_tmf_0
97 ? 00:00:00 scsi_eh_1
98 ? 00:00:00 scsi_tmf_1
99 ? 00:00:00 scsi_eh_2
100 ? 00:00:00 scsi_tmf_2
104 ? 00:00:00 kpsmoused
106 ? 00:00:00 dm_bufio_cache
107 ? 00:00:00 ipv6_addrconf
138 ? 00:00:00 deferwq
140 ? 00:00:00 bioset
178 ? 00:00:00 kauditd
389 ? 00:00:01 kworker/0:1H
399 ? 00:00:00 kworker/1:1H
400 ? 00:00:00 kworker/3:1H
420 ? 00:00:00 rtsx_pci_sdmmc_
425 ? 00:00:00 kworker/2:1H
613 ? 00:00:00 kdmflush
621 ? 00:00:00 bioset
622 ? 00:00:00 kcryptd_io
623 ? 00:00:00 kcryptd
624 ? 00:00:08 dmcrypt_write
625 ? 00:00:00 bioset
688 ? 00:00:00 kdmflush
691 ? 00:00:00 bioset
700 ? 00:00:00 kdmflush
701 ? 00:00:00 bioset
724 ? 00:00:00 jbd2/dm-1-8
725 ? 00:00:00 ext4-rsv-conver
823 ? 00:00:02 systemd-journal
861 ? 00:00:00 rpciod
880 ? 00:00:00 systemd-udevd
910 ? 00:00:00 ktpacpid
918 ? 00:00:00 irq/47-mei_me
949 ? 00:00:00 kmemstick
968 ? 00:00:00 cfg80211
984 ? 00:00:07 irq/50-iwlwifi
999 ? 00:00:00 kworker/u17:0
1001 ? 00:00:00 hci0
1002 ? 00:00:00 hci0
1007 ? 00:00:00 kworker/u17:2
1015 ? 00:00:00 kdmflush
1016 ? 00:00:00 bioset
1018 ? 00:00:00 kvm-irqfd-clean
1071 ? 00:00:00 jbd2/sda1-8
1072 ? 00:00:00 ext4-rsv-conver
1077 ? 00:00:06 jbd2/dm-3-8
1078 ? 00:00:00 ext4-rsv-conver
1105 ? 00:00:00 auditd
1119 ? 00:00:00 audispd
1123 ? 00:00:00 sedispatch
1133 ? 00:00:00 bluetoothd
1137 ? 00:00:00 mcelog
1139 ? 00:00:39 rngd
1146 ? 00:00:00 gssproxy
1154 ? 00:00:00 ModemManager
1158 ? 00:00:04 udisksd
1159 ? 00:00:00 firewalld
1169 ? 00:00:00 systemd-logind
1173 ? 00:00:00 accounts-daemon
1177 ? 00:00:00 alsactl
1249 ? 00:00:00 abrtd
1262 ? 00:00:00 iprt-VBoxWQueue
1268 ? 00:00:00 iprt-VBoxTscThr
1281 ? 00:00:00 abrt-dump-journ
1286 ? 00:00:00 abrt-dump-journ
1324 ? 00:00:12 NetworkManager
1376 ? 00:00:00 libvirtd
1393 ? 00:00:00 crond
1396 ? 00:00:00 atd
1398 ? 00:00:00 gdm
1463 ? 00:00:01 wpa_supplicant
1509 ? 00:00:00 gdm-session-wor
1582 ? 00:00:00 upowerd
1680 ? 00:00:10 packagekitd
1839 ? 00:00:00 dhclient
1903 ? 00:00:00 gdm-session-wor
1938 tty2 00:00:00 xf86-video-inte
2067 ? 00:00:00 krfcommd
2206 ? 00:00:00 cupsd
2235 ? 00:00:00 fwupd
2448 ? 00:00:00 dhclient
2633 ? 00:00:00 kworker/0:1
3387 ? 00:00:00 kworker/3:1
3927 ? 00:00:04 kworker/u16:0
5168 ? 00:00:00 kworker/1:3
13278 ? 00:00:00 kworker/u16:5
13551 ? 00:00:00 kworker/u16:1
13663 ? 00:00:00 kworker/1:1
13668 ? 00:00:00 kworker/3:0
13681 ? 00:00:00 kworker/2:2
13694 ? 00:00:00 kworker/0:3
13724 ? 00:00:00 kworker/u16:4
13765 ? 00:00:00 kworker/1:2
13781 ? 00:00:00 kworker/2:0
13794 ? 00:00:00 kworker/3:3
13799 ? 00:00:00 kworker/0:2
14835 pts/1 00:00:00 sudo
14836 ? 00:00:00 fprintd
14848 pts/1 00:00:00 su
14853 pts/1 00:00:00 bash
14902 ? 00:00:00 kworker/2:3
14903 pts/1 00:00:00 python
15074 pts/1 00:00:00 ps
16120 ? 00:00:00 kworker/2:1
18809 ? 00:00:00 ceph-msgr
18811 ? 00:00:00 rbd
28322 ? 00:00:00 systemd
28332 ? 00:00:00 (sd-pam)
28799 ? 00:00:10 kworker/u16:2
29866 ? 00:00:00 kworker/3:2
30529 ? 00:00:00 dio/dm-3
31742 ? 00:00:00 kworker/0:0
32711 ? 00:00:00 kworker/1:0
2016-07-14 21:49:06,240.240 ERROR:__main__:tried to stop a non-running daemon
2016-07-14 21:49:06,240.240 INFO:__main__:run args=['./bin/ceph', 'fs', 'ls', '--format=json-pretty']
2016-07-14 21:49:06,240.240 INFO:__main__:Running ['./bin/ceph', 'fs', 'ls', '--format=json-pretty']
2016-07-14 21:49:06,428.428 INFO:__main__:run args=['./bin/ceph', 'fs', 'set', u'cephfs_a', 'cluster_down', 'true']
2016-07-14 21:49:06,428.428 INFO:__main__:Running ['./bin/ceph', 'fs', 'set', u'cephfs_a', 'cluster_down', 'true']
2016-07-14 21:49:07,071.071 INFO:__main__:run args=['./bin/ceph', 'fs', 'get', u'cephfs_a', '--format=json-pretty']
2016-07-14 21:49:07,071.071 INFO:__main__:Running ['./bin/ceph', 'fs', 'get', u'cephfs_a', '--format=json-pretty']
2016-07-14 21:49:07,257.257 INFO:__main__:run args=['./bin/ceph', 'mds', 'fail', '4113']
2016-07-14 21:49:07,257.257 INFO:__main__:Running ['./bin/ceph', 'mds', 'fail', '4113']
2016-07-14 21:49:08,055.055 INFO:__main__:run args=['./bin/ceph', 'fs', 'rm', u'cephfs_a', '--yes-i-really-mean-it']
2016-07-14 21:49:08,055.055 INFO:__main__:Running ['./bin/ceph', 'fs', 'rm', u'cephfs_a', '--yes-i-really-mean-it']
2016-07-14 21:49:09,055.055 INFO:__main__:run args=['./bin/ceph', 'osd', 'pool', 'delete', u'cephfs_metadata_a', u'cephfs_metadata_a', '--yes-i-really-really-mean-it']
2016-07-14 21:49:09,055.055 INFO:__main__:Running ['./bin/ceph', 'osd', 'pool', 'delete', u'cephfs_metadata_a', u'cephfs_metadata_a', '--yes-i-really-really-mean-it']
2016-07-14 21:49:10,155.155 INFO:__main__:run args=['./bin/ceph', 'osd', 'pool', 'delete', u'cephfs_data_a', u'cephfs_data_a', '--yes-i-really-really-mean-it']
2016-07-14 21:49:10,155.155 INFO:__main__:Running ['./bin/ceph', 'osd', 'pool', 'delete', u'cephfs_data_a', u'cephfs_data_a', '--yes-i-really-really-mean-it']
2016-07-14 21:49:11,269.269 INFO:__main__:run args=['./bin/ceph', 'daemon', 'osd.0', 'config', 'get', 'osd_mon_report_interval_max']
2016-07-14 21:49:11,270.270 INFO:__main__:Running ['./bin/ceph', 'daemon', 'osd.0', 'config', 'get', 'osd_mon_report_interval_max']
2016-07-14 21:49:11,385.385 INFO:tasks.cephfs.filesystem:_json_asok output: {
"osd_mon_report_interval_max": "5"
}

2016-07-14 21:49:11,385.385 INFO:__main__:run args=['./bin/ceph', 'osd', 'dump', '--format=json-pretty']
2016-07-14 21:49:11,385.385 INFO:__main__:Running ['./bin/ceph', 'osd', 'dump', '--format=json-pretty']
2016-07-14 21:49:11,567.567 INFO:__main__:run args=['./bin/ceph', 'osd', 'blacklist', 'clear']
2016-07-14 21:49:11,567.567 INFO:__main__:Running ['./bin/ceph', 'osd', 'blacklist', 'clear']
2016-07-14 21:49:12,259.259 INFO:__main__:run args=['./bin/ceph', 'auth', 'caps', 'client.0', 'mds', 'allow', 'mon', 'allow r', 'osd', 'allow rw pool=cephfs_data']
2016-07-14 21:49:12,259.259 INFO:__main__:Running ['./bin/ceph', 'auth', 'caps', 'client.0', 'mds', 'allow', 'mon', 'allow r', 'osd', 'allow rw pool=cephfs_data']
2016-07-14 21:49:12,493.493 INFO:__main__:run args=['./bin/ceph', 'auth', 'caps', 'client.1', 'mds', 'allow', 'mon', 'allow r', 'osd', 'allow rw pool=cephfs_data']
2016-07-14 21:49:12,494.494 INFO:__main__:Running ['./bin/ceph', 'auth', 'caps', 'client.1', 'mds', 'allow', 'mon', 'allow r', 'osd', 'allow rw pool=cephfs_data']
2016-07-14 21:49:12,754.754 INFO:__main__:run args=['./bin/ceph', 'auth', 'caps', 'client.2', 'mds', 'allow', 'mon', 'allow r', 'osd', 'allow rw pool=cephfs_data']
2016-07-14 21:49:12,754.754 INFO:__main__:Running ['./bin/ceph', 'auth', 'caps', 'client.2', 'mds', 'allow', 'mon', 'allow r', 'osd', 'allow rw pool=cephfs_data']
2016-07-14 21:49:12,972.972 INFO:__main__:run args=['./bin/ceph', 'auth', 'caps', 'client.3', 'mds', 'allow', 'mon', 'allow r', 'osd', 'allow rw pool=cephfs_data']
2016-07-14 21:49:12,972.972 INFO:__main__:Running ['./bin/ceph', 'auth', 'caps', 'client.3', 'mds', 'allow', 'mon', 'allow r', 'osd', 'allow rw pool=cephfs_data']
2016-07-14 21:49:13,187.187 INFO:tasks.cephfs.cephfs_test_case:['0', '1', '2', '3']
2016-07-14 21:49:13,187.187 INFO:__main__:run args=['./bin/ceph', 'auth', 'list', '--format=json-pretty']
2016-07-14 21:49:13,188.188 INFO:__main__:Running ['./bin/ceph', 'auth', 'list', '--format=json-pretty']
2016-07-14 21:49:13,380.380 INFO:tasks.cephfs.filesystem:Creating filesystem 'cephfs'
2016-07-14 21:49:13,381.381 INFO:__main__:run args=['./bin/ceph', 'daemon', 'mon.a', 'config', 'get', 'mon_pg_warn_min_per_osd']
2016-07-14 21:49:13,381.381 INFO:__main__:Running ['./bin/ceph', 'daemon', 'mon.a', 'config', 'get', 'mon_pg_warn_min_per_osd']
2016-07-14 21:49:13,489.489 INFO:tasks.cephfs.filesystem:_json_asok output: {
"mon_pg_warn_min_per_osd": "3"
}

2016-07-14 21:49:13,489.489 INFO:__main__:run args=['./bin/ceph', 'osd', 'pool', 'create', 'cephfs_metadata', '9']
2016-07-14 21:49:13,490.490 INFO:__main__:Running ['./bin/ceph', 'osd', 'pool', 'create', 'cephfs_metadata', '9']
2016-07-14 21:49:14,342.342 INFO:__main__:run args=['./bin/ceph', 'osd', 'pool', 'create', 'cephfs_data', '9']
2016-07-14 21:49:14,342.342 INFO:__main__:Running ['./bin/ceph', 'osd', 'pool', 'create', 'cephfs_data', '9']
2016-07-14 21:49:15,351.351 INFO:__main__:run args=['./bin/ceph', 'fs', 'new', 'cephfs', 'cephfs_metadata', 'cephfs_data']
2016-07-14 21:49:15,351.351 INFO:__main__:Running ['./bin/ceph', 'fs', 'new', 'cephfs', 'cephfs_metadata', 'cephfs_data']
2016-07-14 21:49:16,416.416 INFO:__main__:run args=['ps', '-u0']
2016-07-14 21:49:16,416.416 INFO:__main__:Running ['ps', '-u0']
2016-07-14 21:49:16,440.440 INFO:__main__:No match for mds a: PID TTY TIME CMD
1 ? 00:00:02 systemd
2 ? 00:00:00 kthreadd
3 ? 00:00:00 ksoftirqd/0
5 ? 00:00:00 kworker/0:0H
7 ? 00:00:36 rcu_sched
8 ? 00:00:00 rcu_bh
9 ? 00:00:15 rcuos/0
10 ? 00:00:00 rcuob/0
11 ? 00:00:00 migration/0
12 ? 00:00:00 watchdog/0
13 ? 00:00:00 watchdog/1
14 ? 00:00:00 migration/1
15 ? 00:00:00 ksoftirqd/1
17 ? 00:00:00 kworker/1:0H
18 ? 00:00:06 rcuos/1
19 ? 00:00:00 rcuob/1
20 ? 00:00:00 watchdog/2
21 ? 00:00:00 migration/2
22 ? 00:00:00 ksoftirqd/2
24 ? 00:00:00 kworker/2:0H
25 ? 00:00:14 rcuos/2
26 ? 00:00:00 rcuob/2
27 ? 00:00:00 watchdog/3
28 ? 00:00:00 migration/3
29 ? 00:00:00 ksoftirqd/3
31 ? 00:00:00 kworker/3:0H
32 ? 00:00:05 rcuos/3
33 ? 00:00:00 rcuob/3
34 ? 00:00:00 kdevtmpfs
35 ? 00:00:00 netns
36 ? 00:00:00 writeback
37 ? 00:00:00 ksmd
38 ? 00:00:00 khugepaged
39 ? 00:00:00 crypto
40 ? 00:00:00 kintegrityd
41 ? 00:00:00 bioset
42 ? 00:00:00 kblockd
43 ? 00:00:00 ata_sff
44 ? 00:00:00 md
45 ? 00:00:00 devfreq_wq
51 ? 00:00:13 kswapd0
52 ? 00:00:00 vmstat
93 ? 00:00:00 kthrotld
94 ? 00:00:00 acpi_thermal_pm
95 ? 00:00:00 scsi_eh_0
96 ? 00:00:00 scsi_tmf_0
97 ? 00:00:00 scsi_eh_1
98 ? 00:00:00 scsi_tmf_1
99 ? 00:00:00 scsi_eh_2
100 ? 00:00:00 scsi_tmf_2
104 ? 00:00:00 kpsmoused
106 ? 00:00:00 dm_bufio_cache
107 ? 00:00:00 ipv6_addrconf
138 ? 00:00:00 deferwq
140 ? 00:00:00 bioset
178 ? 00:00:00 kauditd
389 ? 00:00:01 kworker/0:1H
399 ? 00:00:00 kworker/1:1H
400 ? 00:00:00 kworker/3:1H
420 ? 00:00:00 rtsx_pci_sdmmc_
425 ? 00:00:00 kworker/2:1H
613 ? 00:00:00 kdmflush
621 ? 00:00:00 bioset
622 ? 00:00:00 kcryptd_io
623 ? 00:00:00 kcryptd
624 ? 00:00:08 dmcrypt_write
625 ? 00:00:00 bioset
688 ? 00:00:00 kdmflush
691 ? 00:00:00 bioset
700 ? 00:00:00 kdmflush
701 ? 00:00:00 bioset
724 ? 00:00:00 jbd2/dm-1-8
725 ? 00:00:00 ext4-rsv-conver
823 ? 00:00:02 systemd-journal
861 ? 00:00:00 rpciod
880 ? 00:00:00 systemd-udevd
910 ? 00:00:00 ktpacpid
918 ? 00:00:00 irq/47-mei_me
949 ? 00:00:00 kmemstick
968 ? 00:00:00 cfg80211
984 ? 00:00:07 irq/50-iwlwifi
999 ? 00:00:00 kworker/u17:0
1001 ? 00:00:00 hci0
1002 ? 00:00:00 hci0
1007 ? 00:00:00 kworker/u17:2
1015 ? 00:00:00 kdmflush
1016 ? 00:00:00 bioset
1018 ? 00:00:00 kvm-irqfd-clean
1071 ? 00:00:00 jbd2/sda1-8
1072 ? 00:00:00 ext4-rsv-conver
1077 ? 00:00:06 jbd2/dm-3-8
1078 ? 00:00:00 ext4-rsv-conver
1105 ? 00:00:00 auditd
1119 ? 00:00:00 audispd
1123 ? 00:00:00 sedispatch
1133 ? 00:00:00 bluetoothd
1137 ? 00:00:00 mcelog
1139 ? 00:00:39 rngd
1146 ? 00:00:00 gssproxy
1154 ? 00:00:00 ModemManager
1158 ? 00:00:04 udisksd
1159 ? 00:00:00 firewalld
1169 ? 00:00:00 systemd-logind
1173 ? 00:00:00 accounts-daemon
1177 ? 00:00:00 alsactl
1249 ? 00:00:00 abrtd
1262 ? 00:00:00 iprt-VBoxWQueue
1268 ? 00:00:00 iprt-VBoxTscThr
1281 ? 00:00:00 abrt-dump-journ
1286 ? 00:00:00 abrt-dump-journ
1324 ? 00:00:12 NetworkManager
1376 ? 00:00:00 libvirtd
1393 ? 00:00:00 crond
1396 ? 00:00:00 atd
1398 ? 00:00:00 gdm
1463 ? 00:00:01 wpa_supplicant
1509 ? 00:00:00 gdm-session-wor
1582 ? 00:00:00 upowerd
1680 ? 00:00:10 packagekitd
1839 ? 00:00:00 dhclient
1903 ? 00:00:00 gdm-session-wor
1938 tty2 00:00:00 xf86-video-inte
2067 ? 00:00:00 krfcommd
2206 ? 00:00:00 cupsd
2235 ? 00:00:00 fwupd
2448 ? 00:00:00 dhclient
2633 ? 00:00:00 kworker/0:1
3387 ? 00:00:00 kworker/3:1
3927 ? 00:00:04 kworker/u16:0
5168 ? 00:00:00 kworker/1:3
13278 ? 00:00:00 kworker/u16:5
13551 ? 00:00:00 kworker/u16:1
13663 ? 00:00:00 kworker/1:1
13668 ? 00:00:00 kworker/3:0
13681 ? 00:00:00 kworker/2:2
13694 ? 00:00:00 kworker/0:3
13724 ? 00:00:00 kworker/u16:4
13765 ? 00:00:00 kworker/1:2
13781 ? 00:00:00 kworker/2:0
13794 ? 00:00:00 kworker/3:3
13799 ? 00:00:00 kworker/0:2
14835 pts/1 00:00:00 sudo
14836 ? 00:00:00 fprintd
14848 pts/1 00:00:00 su
14853 pts/1 00:00:00 bash
14902 ? 00:00:00 kworker/2:3
14903 pts/1 00:00:00 python
15599 pts/1 00:00:00 ps
16120 ? 00:00:00 kworker/2:1
18809 ? 00:00:00 ceph-msgr
18811 ? 00:00:00 rbd
28322 ? 00:00:00 systemd
28332 ? 00:00:00 (sd-pam)
28799 ? 00:00:10 kworker/u16:2
29866 ? 00:00:00 kworker/3:2
30529 ? 00:00:00 dio/dm-3
31742 ? 00:00:00 kworker/0:0
32711 ? 00:00:00 kworker/1:0
2016-07-14 21:49:16,441.441 INFO:__main__:run args=['./bin/./ceph-mds', '-i', 'a']
2016-07-14 21:49:16,441.441 INFO:__main__:Running ['./bin/./ceph-mds', '-i', 'a']
2016-07-14 21:49:16,471.471 INFO:__main__:run args=['./bin/ceph', 'fs', 'get', 'cephfs', '--format=json-pretty']
2016-07-14 21:49:16,472.472 INFO:__main__:Running ['./bin/ceph', 'fs', 'get', 'cephfs', '--format=json-pretty']
2016-07-14 21:49:16,667.667 INFO:tasks.cephfs.filesystem:are_daemons_healthy: mds map: {u'session_autoclose': 300, u'up': {u'mds_0': 4123}, u'last_failure_osd_epoch': 0, u'in': [0], u'last_failure': 0, u'max_file_size': 1099511627776, u'explicitly_allowed_features': 0, u'damaged': [], u'tableserver': 0, u'metadata_pool': 3, u'failed': [], u'epoch': 10, u'flags': 0, u'max_mds': 1, u'compat': {u'compat': {}, u'ro_compat': {}, u'incompat': {u'feature_8': u'file layout v2', u'feature_2': u'client writeable ranges', u'feature_3': u'default file layouts on dirs', u'feature_1': u'base v0.20', u'feature_6': u'dirfrag is stored in omap', u'feature_4': u'dir inode in separate object', u'feature_5': u'mds uses versioned encoding'}}, u'data_pools': [4], u'info': {u'gid_4123': {u'standby_for_rank': -1, u'export_targets': [], u'name': u'a', u'incarnation': 10, u'standby_replay': False, u'state_seq': 1, u'standby_for_fscid': -1, u'state': u'up:creating', u'gid': 4123, u'features': 576460752032890879, u'rank': 0, u'standby_for_name': u'', u'addr': u'10.70.1.141:6812/14766'}}, u'fs_name': u'cephfs', u'created': u'2016-07-14 21:49:15.555766', u'enabled': True, u'modified': u'2016-07-14 21:49:15.555766', u'session_timeout': 60, u'stopped': [], u'ever_allowed_features': 0, u'root': 0}
2016-07-14 21:49:16,668.668 WARNING:tasks.cephfs.filesystem:Unhealthy mds state gid_4123:up:creating
2016-07-14 21:49:17,668.668 INFO:__main__:run args=['./bin/ceph', 'fs', 'get', 'cephfs', '--format=json-pretty']
2016-07-14 21:49:17,668.668 INFO:__main__:Running ['./bin/ceph', 'fs', 'get', 'cephfs', '--format=json-pretty']
2016-07-14 21:49:17,866.866 INFO:tasks.cephfs.filesystem:are_daemons_healthy: mds map: {u'session_autoclose': 300, u'up': {u'mds_0': 4123}, u'last_failure_osd_epoch': 0, u'in': [0], u'last_failure': 0, u'max_file_size': 1099511627776, u'explicitly_allowed_features': 0, u'damaged': [], u'tableserver': 0, u'metadata_pool': 3, u'failed': [], u'epoch': 11, u'flags': 0, u'max_mds': 1, u'compat': {u'compat': {}, u'ro_compat': {}, u'incompat': {u'feature_8': u'file layout v2', u'feature_2': u'client writeable ranges', u'feature_3': u'default file layouts on dirs', u'feature_1': u'base v0.20', u'feature_6': u'dirfrag is stored in omap', u'feature_4': u'dir inode in separate object', u'feature_5': u'mds uses versioned encoding'}}, u'data_pools': [4], u'info': {u'gid_4123': {u'standby_for_rank': -1, u'export_targets': [], u'name': u'a', u'incarnation': 10, u'standby_replay': False, u'state_seq': 4, u'standby_for_fscid': -1, u'state': u'up:active', u'gid': 4123, u'features': 576460752032890879, u'rank': 0, u'standby_for_name': u'', u'addr': u'10.70.1.141:6812/14766'}}, u'fs_name': u'cephfs', u'created': u'2016-07-14 21:49:15.555766', u'enabled': True, u'modified': u'2016-07-14 21:49:15.555766', u'session_timeout': 60, u'stopped': [], u'ever_allowed_features': 0, u'root': 0}
2016-07-14 21:49:17,866.866 INFO:tasks.cephfs.filesystem:are_daemons_healthy: 1/1
2016-07-14 21:49:17,867.867 INFO:__main__:run args=['./bin/ceph', 'daemon', 'mds.a', 'status']
2016-07-14 21:49:17,867.867 INFO:__main__:Running ['./bin/ceph', 'daemon', 'mds.a', 'status']
2016-07-14 21:49:17,968.968 INFO:tasks.cephfs.filesystem:_json_asok output: {
"cluster_fsid": "ae66e242-d103-408a-b4cf-f160f95d365c",
"whoami": 0,
"want_state": "up:active",
"state": "up:active",
"mdsmap_epoch": 11,
"osdmap_epoch": 17,
"osdmap_epoch_barrier": 17
}

2016-07-14 21:49:17,969.969 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:17,969.969 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.0': No such file or directory
2016-07-14 21:49:17,983.983 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.0
2016-07-14 21:49:17,984.984 INFO:__main__:run args=['mkdir', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:17,984.984 INFO:__main__:Running ['mkdir', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:17,991.991 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:17,992.992 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:18,007.007 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:18,008.008 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:18,019.019 INFO:__main__:Pre-mount connections: [43, 47]
2016-07-14 21:49:18,019.019 INFO:__main__:run args=['./bin/ceph-fuse', '-f', '--name', 'client.0', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:18,020.020 INFO:__main__:Running ['./bin/ceph-fuse', '-f', '--name', 'client.0', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:18,026.026 INFO:__main__:Mounting client.0 with pid 15669
2016-07-14 21:49:18,026.026 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:18,027.027 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:18,043.043 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:18,044.044 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:19,059.059 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:19,060.060 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:19,069.069 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:19,069.069 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:19,080.080 INFO:__main__:Post-mount connections: [43, 47, 48]
2016-07-14 21:49:19,080.080 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:19,081.081 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:19,089.089 INFO:tasks.cephfs.fuse_mount:ceph-fuse is mounted on /tmp/tmp0EnRmD/mnt.0
2016-07-14 21:49:19,089.089 INFO:__main__:run args=['sudo', 'chmod', '1777', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:19,089.089 INFO:__main__:Running ['chmod', '1777', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:19,101.101 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:19,101.101 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.1': No such file or directory
2016-07-14 21:49:19,112.112 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.1
2016-07-14 21:49:19,112.112 INFO:__main__:run args=['mkdir', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:19,113.113 INFO:__main__:Running ['mkdir', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:19,121.121 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:19,121.121 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:19,133.133 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:19,134.134 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:19,143.143 INFO:__main__:Pre-mount connections: [43, 47, 48]
2016-07-14 21:49:19,143.143 INFO:__main__:run args=['./bin/ceph-fuse', '-f', '--name', 'client.1', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:19,144.144 INFO:__main__:Running ['./bin/ceph-fuse', '-f', '--name', 'client.1', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:19,152.152 INFO:__main__:Mounting client.1 with pid 15711
2016-07-14 21:49:19,152.152 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:19,152.152 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:19,165.165 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:19,166.166 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:20,176.176 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:20,176.176 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:20,185.185 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:20,185.185 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:20,193.193 INFO:__main__:Post-mount connections: [43, 47, 48, 49]
2016-07-14 21:49:20,194.194 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:20,194.194 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:20,202.202 INFO:tasks.cephfs.fuse_mount:ceph-fuse is mounted on /tmp/tmp0EnRmD/mnt.1
2016-07-14 21:49:20,202.202 INFO:__main__:run args=['sudo', 'chmod', '1777', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:20,202.202 INFO:__main__:Running ['chmod', '1777', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:20,218.218 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:20,218.218 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:20,229.229 INFO:tasks.cephfs.fuse_mount:ceph-fuse is mounted on /tmp/tmp0EnRmD/mnt.0
2016-07-14 21:49:20,229.229 INFO:tasks.cephfs.fuse_mount:Running fusermount -u on local...
2016-07-14 21:49:20,229.229 INFO:__main__:run args=['sudo', 'fusermount', '-u', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:20,230.230 INFO:__main__:Running ['fusermount', '-u', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:20,260.260 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:20,261.261 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:20,270.270 INFO:teuthology.orchestra.run:waiting for 900
2016-07-14 21:49:26,277.277 INFO:__main__:run args=['rmdir', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:26,277.277 INFO:__main__:Running ['rmdir', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:26,286.286 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:26,287.287 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:26,294.294 INFO:tasks.cephfs.fuse_mount:ceph-fuse is mounted on /tmp/tmp0EnRmD/mnt.1
2016-07-14 21:49:26,294.294 INFO:tasks.cephfs.fuse_mount:Running fusermount -u on local...
2016-07-14 21:49:26,294.294 INFO:__main__:run args=['sudo', 'fusermount', '-u', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:26,294.294 INFO:__main__:Running ['fusermount', '-u', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:26,323.323 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:26,323.323 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:26,333.333 INFO:teuthology.orchestra.run:waiting for 900
2016-07-14 21:49:32,338.338 INFO:__main__:run args=['rmdir', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:32,339.339 INFO:__main__:Running ['rmdir', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:32,346.346 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.2']
2016-07-14 21:49:32,347.347 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.2']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.2': No such file or directory
2016-07-14 21:49:32,358.358 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.2
2016-07-14 21:49:32,359.359 INFO:__main__:run args=['rmdir', '--', '/tmp/tmp0EnRmD/mnt.2']
2016-07-14 21:49:32,359.359 INFO:__main__:Running ['rmdir', '--', '/tmp/tmp0EnRmD/mnt.2']
rmdir: failed to remove '/tmp/tmp0EnRmD/mnt.2': No such file or directory
2016-07-14 21:49:32,371.371 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.3']
2016-07-14 21:49:32,372.372 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.3']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.3': No such file or directory
2016-07-14 21:49:32,382.382 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.3
2016-07-14 21:49:32,383.383 INFO:__main__:run args=['rmdir', '--', '/tmp/tmp0EnRmD/mnt.3']
2016-07-14 21:49:32,384.384 INFO:__main__:Running ['rmdir', '--', '/tmp/tmp0EnRmD/mnt.3']
rmdir: failed to remove '/tmp/tmp0EnRmD/mnt.3': No such file or directory
2016-07-14 21:49:32,392.392 INFO:__main__:run args=['mkdir', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:32,392.392 INFO:__main__:Running ['mkdir', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:32,405.405 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:32,405.405 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:32,419.419 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:32,420.420 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:32,427.427 INFO:__main__:Pre-mount connections: [43, 47]
2016-07-14 21:49:32,427.427 INFO:__main__:run args=['./bin/ceph-fuse', '-f', '--name', 'client.0', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:32,427.427 INFO:__main__:Running ['./bin/ceph-fuse', '-f', '--name', 'client.0', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:32,440.440 INFO:__main__:Mounting client.0 with pid 15797
2016-07-14 21:49:32,441.441 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:32,441.441 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:32,456.456 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:32,456.456 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:33,469.469 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:33,469.469 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:33,477.477 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:33,477.477 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:33,485.485 INFO:__main__:Post-mount connections: [43, 47, 48]
2016-07-14 21:49:33,486.486 INFO:__main__:run args=['mkdir', 'parent1']
2016-07-14 21:49:33,486.486 INFO:__main__:Running ['mkdir', 'parent1']
2016-07-14 21:49:33,520.520 INFO:__main__:run args=['mkdir', 'parent2']
2016-07-14 21:49:33,520.520 INFO:__main__:Running ['mkdir', 'parent2']
2016-07-14 21:49:33,531.531 INFO:__main__:run args=['mkdir', 'parent1/mydir']
2016-07-14 21:49:33,531.531 INFO:__main__:Running ['mkdir', 'parent1/mydir']
2016-07-14 21:49:33,557.557 INFO:__main__:run args=['mkdir', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:33,557.557 INFO:__main__:Running ['mkdir', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:33,564.564 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:33,564.564 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:33,576.576 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:33,576.576 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:33,588.588 INFO:__main__:Pre-mount connections: [43, 47, 48]
2016-07-14 21:49:33,589.589 INFO:__main__:run args=['./bin/ceph-fuse', '-f', '--name', 'client.1', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:33,589.589 INFO:__main__:Running ['./bin/ceph-fuse', '-f', '--name', 'client.1', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:33,596.596 INFO:__main__:Mounting client.1 with pid 15840
2016-07-14 21:49:33,596.596 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:33,596.596 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:33,612.612 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:33,612.612 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:34,626.626 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:34,626.626 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:34,635.635 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:34,635.635 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:34,643.643 INFO:__main__:Post-mount connections: [43, 47, 48, 49]
2016-07-14 21:49:34,644.644 INFO:__main__:run args=['touch', 'parent1/mydir/afile']
2016-07-14 21:49:34,644.644 INFO:__main__:Running ['touch', 'parent1/mydir/afile']
2016-07-14 21:49:34,693.693 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:34,694.694 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:34,706.706 INFO:tasks.cephfs.fuse_mount:ceph-fuse is mounted on /tmp/tmp0EnRmD/mnt.1
2016-07-14 21:49:34,706.706 INFO:tasks.cephfs.fuse_mount:Running fusermount -u on local...
2016-07-14 21:49:34,706.706 INFO:__main__:run args=['sudo', 'fusermount', '-u', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:34,706.706 INFO:__main__:Running ['fusermount', '-u', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:34,728.728 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:34,729.729 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:34,742.742 INFO:teuthology.orchestra.run:waiting for 900
2016-07-14 21:49:40,745.745 INFO:__main__:run args=['rmdir', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:40,745.745 INFO:__main__:Running ['rmdir', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:40,752.752 INFO:__main__:run args=['ls', 'parent1/mydir']
2016-07-14 21:49:40,752.752 INFO:__main__:Running ['ls', 'parent1/mydir']
2016-07-14 21:49:40,761.761 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:40,762.762 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:40,769.769 INFO:tasks.cephfs.fuse_mount:ceph-fuse is mounted on /tmp/tmp0EnRmD/mnt.0
2016-07-14 21:49:40,769.769 INFO:tasks.cephfs.fuse_mount:Running fusermount -u on local...
2016-07-14 21:49:40,769.769 INFO:__main__:run args=['sudo', 'fusermount', '-u', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:40,769.769 INFO:__main__:Running ['fusermount', '-u', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:40,801.801 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:40,801.801 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:40,816.816 INFO:__main__:kill
2016-07-14 21:49:40,816.816 INFO:__main__:kill: killing pid 15797 (['./bin/ceph-fuse', '-f', '--name', 'client.0', '/tmp/tmp0EnRmD/mnt.0'])
ceph-fuse[15797]: starting ceph client
2016-07-14 21:49:32.455607 7fa938e2cf40 -1 WARNING: the following dangerous and experimental features are enabled: *
2016-07-14 21:49:32.455746 7fa938e2cf40 -1 WARNING: the following dangerous and experimental features are enabled: *
2016-07-14 21:49:32.457752 7fa938e2cf40 -1 WARNING: the following dangerous and experimental features are enabled: *
2016-07-14 21:49:32.458410 7fa938e2cf40 -1 init, newargv = 0x9b09080 newargc=11
ceph-fuse[15797]: starting fuse
ceph-fuse[15797]: fuse finished with error 0 and tester_r 0
2016-07-14 21:49:40,818.818 INFO:__main__:run args=['rm', '-rf', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:40,818.818 INFO:__main__:Running ['rm', '-rf', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:40,829.829 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:40,829.829 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.1': No such file or directory
2016-07-14 21:49:40,837.837 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.1
2016-07-14 21:49:40,837.837 INFO:__main__:run args=['rm', '-rf', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:40,838.838 INFO:__main__:Running ['rm', '-rf', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:40,850.850 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.2']
2016-07-14 21:49:40,850.850 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.2']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.2': No such file or directory
2016-07-14 21:49:40,861.861 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.2
2016-07-14 21:49:40,861.861 INFO:__main__:run args=['rm', '-rf', '/tmp/tmp0EnRmD/mnt.2']
2016-07-14 21:49:40,862.862 INFO:__main__:Running ['rm', '-rf', '/tmp/tmp0EnRmD/mnt.2']
2016-07-14 21:49:40,869.869 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.3']
2016-07-14 21:49:40,869.869 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.3']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.3': No such file or directory
2016-07-14 21:49:40,881.881 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.3
2016-07-14 21:49:40,881.881 INFO:__main__:run args=['rm', '-rf', '/tmp/tmp0EnRmD/mnt.3']
2016-07-14 21:49:40,881.881 INFO:__main__:Running ['rm', '-rf', '/tmp/tmp0EnRmD/mnt.3']
2016-07-14 21:49:40,889.889 INFO:__main__:test_15303 (tasks.cephfs.test_volume_client.TestVolumeClient) ... ok
2016-07-14 21:49:40,890.890 INFO:__main__:Stopped test: test_15303 (tasks.cephfs.test_volume_client.TestVolumeClient) in 34.690646s
2016-07-14 21:49:40,891.891 INFO:__main__:Starting test: test_data_isolated (tasks.cephfs.test_volume_client.TestVolumeClient)
2016-07-14 21:49:40,891.891 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:40,892.892 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.0': No such file or directory
2016-07-14 21:49:40,901.901 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.0
2016-07-14 21:49:40,901.901 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:40,901.901 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.1': No such file or directory
2016-07-14 21:49:40,916.916 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.1
2016-07-14 21:49:40,916.916 INFO:__main__:run args=['ps', '-u0']
2016-07-14 21:49:40,917.917 INFO:__main__:Running ['ps', '-u0']
2016-07-14 21:49:40,947.947 INFO:__main__:No match for mds a: PID TTY TIME CMD
1 ? 00:00:02 systemd
2 ? 00:00:00 kthreadd
3 ? 00:00:00 ksoftirqd/0
5 ? 00:00:00 kworker/0:0H
7 ? 00:00:36 rcu_sched
8 ? 00:00:00 rcu_bh
9 ? 00:00:15 rcuos/0
10 ? 00:00:00 rcuob/0
11 ? 00:00:00 migration/0
12 ? 00:00:00 watchdog/0
13 ? 00:00:00 watchdog/1
14 ? 00:00:00 migration/1
15 ? 00:00:00 ksoftirqd/1
17 ? 00:00:00 kworker/1:0H
18 ? 00:00:06 rcuos/1
19 ? 00:00:00 rcuob/1
20 ? 00:00:00 watchdog/2
21 ? 00:00:00 migration/2
22 ? 00:00:00 ksoftirqd/2
24 ? 00:00:00 kworker/2:0H
25 ? 00:00:14 rcuos/2
26 ? 00:00:00 rcuob/2
27 ? 00:00:00 watchdog/3
28 ? 00:00:00 migration/3
29 ? 00:00:00 ksoftirqd/3
31 ? 00:00:00 kworker/3:0H
32 ? 00:00:05 rcuos/3
33 ? 00:00:00 rcuob/3
34 ? 00:00:00 kdevtmpfs
35 ? 00:00:00 netns
36 ? 00:00:00 writeback
37 ? 00:00:00 ksmd
38 ? 00:00:00 khugepaged
39 ? 00:00:00 crypto
40 ? 00:00:00 kintegrityd
41 ? 00:00:00 bioset
42 ? 00:00:00 kblockd
43 ? 00:00:00 ata_sff
44 ? 00:00:00 md
45 ? 00:00:00 devfreq_wq
51 ? 00:00:13 kswapd0
52 ? 00:00:00 vmstat
93 ? 00:00:00 kthrotld
94 ? 00:00:00 acpi_thermal_pm
95 ? 00:00:00 scsi_eh_0
96 ? 00:00:00 scsi_tmf_0
97 ? 00:00:00 scsi_eh_1
98 ? 00:00:00 scsi_tmf_1
99 ? 00:00:00 scsi_eh_2
100 ? 00:00:00 scsi_tmf_2
104 ? 00:00:00 kpsmoused
106 ? 00:00:00 dm_bufio_cache
107 ? 00:00:00 ipv6_addrconf
138 ? 00:00:00 deferwq
140 ? 00:00:00 bioset
178 ? 00:00:00 kauditd
389 ? 00:00:01 kworker/0:1H
399 ? 00:00:00 kworker/1:1H
400 ? 00:00:00 kworker/3:1H
420 ? 00:00:00 rtsx_pci_sdmmc_
425 ? 00:00:00 kworker/2:1H
613 ? 00:00:00 kdmflush
621 ? 00:00:00 bioset
622 ? 00:00:00 kcryptd_io
623 ? 00:00:00 kcryptd
624 ? 00:00:08 dmcrypt_write
625 ? 00:00:00 bioset
688 ? 00:00:00 kdmflush
691 ? 00:00:00 bioset
700 ? 00:00:00 kdmflush
701 ? 00:00:00 bioset
724 ? 00:00:00 jbd2/dm-1-8
725 ? 00:00:00 ext4-rsv-conver
823 ? 00:00:02 systemd-journal
861 ? 00:00:00 rpciod
880 ? 00:00:00 systemd-udevd
910 ? 00:00:00 ktpacpid
918 ? 00:00:00 irq/47-mei_me
949 ? 00:00:00 kmemstick
968 ? 00:00:00 cfg80211
984 ? 00:00:07 irq/50-iwlwifi
999 ? 00:00:00 kworker/u17:0
1001 ? 00:00:00 hci0
1002 ? 00:00:00 hci0
1007 ? 00:00:00 kworker/u17:2
1015 ? 00:00:00 kdmflush
1016 ? 00:00:00 bioset
1018 ? 00:00:00 kvm-irqfd-clean
1071 ? 00:00:00 jbd2/sda1-8
1072 ? 00:00:00 ext4-rsv-conver
1077 ? 00:00:06 jbd2/dm-3-8
1078 ? 00:00:00 ext4-rsv-conver
1105 ? 00:00:00 auditd
1119 ? 00:00:00 audispd
1123 ? 00:00:00 sedispatch
1133 ? 00:00:00 bluetoothd
1137 ? 00:00:00 mcelog
1139 ? 00:00:39 rngd
1146 ? 00:00:00 gssproxy
1154 ? 00:00:00 ModemManager
1158 ? 00:00:04 udisksd
1159 ? 00:00:00 firewalld
1169 ? 00:00:00 systemd-logind
1173 ? 00:00:00 accounts-daemon
1177 ? 00:00:00 alsactl
1249 ? 00:00:00 abrtd
1262 ? 00:00:00 iprt-VBoxWQueue
1268 ? 00:00:00 iprt-VBoxTscThr
1281 ? 00:00:00 abrt-dump-journ
1286 ? 00:00:00 abrt-dump-journ
1324 ? 00:00:12 NetworkManager
1376 ? 00:00:00 libvirtd
1393 ? 00:00:00 crond
1396 ? 00:00:00 atd
1398 ? 00:00:00 gdm
1463 ? 00:00:01 wpa_supplicant
1509 ? 00:00:00 gdm-session-wor
1582 ? 00:00:00 upowerd
1680 ? 00:00:10 packagekitd
1839 ? 00:00:00 dhclient
1903 ? 00:00:00 gdm-session-wor
1938 tty2 00:00:00 xf86-video-inte
2067 ? 00:00:00 krfcommd
2206 ? 00:00:00 cupsd
2235 ? 00:00:00 fwupd
2448 ? 00:00:00 dhclient
2633 ? 00:00:00 kworker/0:1
3387 ? 00:00:00 kworker/3:1
3927 ? 00:00:04 kworker/u16:0
5168 ? 00:00:00 kworker/1:3
13278 ? 00:00:00 kworker/u16:5
13551 ? 00:00:00 kworker/u16:1
13663 ? 00:00:00 kworker/1:1
13668 ? 00:00:00 kworker/3:0
13681 ? 00:00:00 kworker/2:2
13694 ? 00:00:00 kworker/0:3
13724 ? 00:00:00 kworker/u16:4
13765 ? 00:00:00 kworker/1:2
13781 ? 00:00:00 kworker/2:0
13794 ? 00:00:00 kworker/3:3
13799 ? 00:00:00 kworker/0:2
14835 pts/1 00:00:00 sudo
14848 pts/1 00:00:00 su
14853 pts/1 00:00:00 bash
14902 ? 00:00:00 kworker/2:3
14903 pts/1 00:00:00 python
15764 ? 00:00:00 fprintd
15897 pts/1 00:00:00 ps
16120 ? 00:00:00 kworker/2:1
18809 ? 00:00:00 ceph-msgr
18811 ? 00:00:00 rbd
28322 ? 00:00:00 systemd
28332 ? 00:00:00 (sd-pam)
28799 ? 00:00:10 kworker/u16:2
29866 ? 00:00:00 kworker/3:2
30529 ? 00:00:00 dio/dm-3
31742 ? 00:00:00 kworker/0:0
32711 ? 00:00:00 kworker/1:0
2016-07-14 21:49:40,948.948 ERROR:__main__:tried to stop a non-running daemon
2016-07-14 21:49:40,949.949 INFO:__main__:run args=['./bin/ceph', 'fs', 'ls', '--format=json-pretty']
2016-07-14 21:49:40,950.950 INFO:__main__:Running ['./bin/ceph', 'fs', 'ls', '--format=json-pretty']
2016-07-14 21:49:41,133.133 INFO:__main__:run args=['./bin/ceph', 'fs', 'set', u'cephfs', 'cluster_down', 'true']
2016-07-14 21:49:41,133.133 INFO:__main__:Running ['./bin/ceph', 'fs', 'set', u'cephfs', 'cluster_down', 'true']
2016-07-14 21:49:41,578.578 INFO:__main__:run args=['./bin/ceph', 'fs', 'get', u'cephfs', '--format=json-pretty']
2016-07-14 21:49:41,578.578 INFO:__main__:Running ['./bin/ceph', 'fs', 'get', u'cephfs', '--format=json-pretty']
2016-07-14 21:49:41,770.770 INFO:__main__:run args=['./bin/ceph', 'mds', 'fail', '4123']
2016-07-14 21:49:41,770.770 INFO:__main__:Running ['./bin/ceph', 'mds', 'fail', '4123']
2016-07-14 21:49:42,617.617 INFO:__main__:run args=['./bin/ceph', 'fs', 'rm', u'cephfs', '--yes-i-really-mean-it']
2016-07-14 21:49:42,617.617 INFO:__main__:Running ['./bin/ceph', 'fs', 'rm', u'cephfs', '--yes-i-really-mean-it']
2016-07-14 21:49:43,655.655 INFO:__main__:run args=['./bin/ceph', 'osd', 'pool', 'delete', u'cephfs_metadata', u'cephfs_metadata', '--yes-i-really-really-mean-it']
2016-07-14 21:49:43,655.655 INFO:__main__:Running ['./bin/ceph', 'osd', 'pool', 'delete', u'cephfs_metadata', u'cephfs_metadata', '--yes-i-really-really-mean-it']
2016-07-14 21:49:44,708.708 INFO:__main__:run args=['./bin/ceph', 'osd', 'pool', 'delete', u'cephfs_data', u'cephfs_data', '--yes-i-really-really-mean-it']
2016-07-14 21:49:44,708.708 INFO:__main__:Running ['./bin/ceph', 'osd', 'pool', 'delete', u'cephfs_data', u'cephfs_data', '--yes-i-really-really-mean-it']
2016-07-14 21:49:45,790.790 INFO:__main__:run args=['./bin/ceph', 'daemon', 'osd.0', 'config', 'get', 'osd_mon_report_interval_max']
2016-07-14 21:49:45,790.790 INFO:__main__:Running ['./bin/ceph', 'daemon', 'osd.0', 'config', 'get', 'osd_mon_report_interval_max']
2016-07-14 21:49:45,890.890 INFO:tasks.cephfs.filesystem:_json_asok output: {
"osd_mon_report_interval_max": "5"
}

2016-07-14 21:49:45,891.891 INFO:__main__:run args=['./bin/ceph', 'osd', 'dump', '--format=json-pretty']
2016-07-14 21:49:45,891.891 INFO:__main__:Running ['./bin/ceph', 'osd', 'dump', '--format=json-pretty']
2016-07-14 21:49:46,093.093 INFO:__main__:run args=['./bin/ceph', 'osd', 'blacklist', 'clear']
2016-07-14 21:49:46,093.093 INFO:__main__:Running ['./bin/ceph', 'osd', 'blacklist', 'clear']
2016-07-14 21:49:46,835.835 INFO:__main__:run args=['./bin/ceph', 'auth', 'caps', 'client.0', 'mds', 'allow', 'mon', 'allow r', 'osd', 'allow rw pool=cephfs_data']
2016-07-14 21:49:46,835.835 INFO:__main__:Running ['./bin/ceph', 'auth', 'caps', 'client.0', 'mds', 'allow', 'mon', 'allow r', 'osd', 'allow rw pool=cephfs_data']
2016-07-14 21:49:47,034.034 INFO:__main__:run args=['./bin/ceph', 'auth', 'caps', 'client.1', 'mds', 'allow', 'mon', 'allow r', 'osd', 'allow rw pool=cephfs_data']
2016-07-14 21:49:47,034.034 INFO:__main__:Running ['./bin/ceph', 'auth', 'caps', 'client.1', 'mds', 'allow', 'mon', 'allow r', 'osd', 'allow rw pool=cephfs_data']
2016-07-14 21:49:47,255.255 INFO:__main__:run args=['./bin/ceph', 'auth', 'caps', 'client.2', 'mds', 'allow', 'mon', 'allow r', 'osd', 'allow rw pool=cephfs_data']
2016-07-14 21:49:47,255.255 INFO:__main__:Running ['./bin/ceph', 'auth', 'caps', 'client.2', 'mds', 'allow', 'mon', 'allow r', 'osd', 'allow rw pool=cephfs_data']
2016-07-14 21:49:47,445.445 INFO:__main__:run args=['./bin/ceph', 'auth', 'caps', 'client.3', 'mds', 'allow', 'mon', 'allow r', 'osd', 'allow rw pool=cephfs_data']
2016-07-14 21:49:47,446.446 INFO:__main__:Running ['./bin/ceph', 'auth', 'caps', 'client.3', 'mds', 'allow', 'mon', 'allow r', 'osd', 'allow rw pool=cephfs_data']
2016-07-14 21:49:47,665.665 INFO:tasks.cephfs.cephfs_test_case:['0', '1', '2', '3']
2016-07-14 21:49:47,665.665 INFO:__main__:run args=['./bin/ceph', 'auth', 'list', '--format=json-pretty']
2016-07-14 21:49:47,666.666 INFO:__main__:Running ['./bin/ceph', 'auth', 'list', '--format=json-pretty']
2016-07-14 21:49:47,855.855 INFO:tasks.cephfs.filesystem:Creating filesystem 'cephfs'
2016-07-14 21:49:47,855.855 INFO:__main__:run args=['./bin/ceph', 'daemon', 'mon.a', 'config', 'get', 'mon_pg_warn_min_per_osd']
2016-07-14 21:49:47,855.855 INFO:__main__:Running ['./bin/ceph', 'daemon', 'mon.a', 'config', 'get', 'mon_pg_warn_min_per_osd']
2016-07-14 21:49:47,957.957 INFO:tasks.cephfs.filesystem:_json_asok output: {
"mon_pg_warn_min_per_osd": "3"
}

2016-07-14 21:49:47,958.958 INFO:__main__:run args=['./bin/ceph', 'osd', 'pool', 'create', 'cephfs_metadata', '9']
2016-07-14 21:49:47,958.958 INFO:__main__:Running ['./bin/ceph', 'osd', 'pool', 'create', 'cephfs_metadata', '9']
2016-07-14 21:49:48,860.860 INFO:__main__:run args=['./bin/ceph', 'osd', 'pool', 'create', 'cephfs_data', '9']
2016-07-14 21:49:48,860.860 INFO:__main__:Running ['./bin/ceph', 'osd', 'pool', 'create', 'cephfs_data', '9']
2016-07-14 21:49:49,917.917 INFO:__main__:run args=['./bin/ceph', 'fs', 'new', 'cephfs', 'cephfs_metadata', 'cephfs_data']
2016-07-14 21:49:49,918.918 INFO:__main__:Running ['./bin/ceph', 'fs', 'new', 'cephfs', 'cephfs_metadata', 'cephfs_data']
2016-07-14 21:49:50,479.479 INFO:__main__:run args=['ps', '-u0']
2016-07-14 21:49:50,479.479 INFO:__main__:Running ['ps', '-u0']
2016-07-14 21:49:50,496.496 INFO:__main__:No match for mds a: PID TTY TIME CMD
1 ? 00:00:02 systemd
2 ? 00:00:00 kthreadd
3 ? 00:00:00 ksoftirqd/0
5 ? 00:00:00 kworker/0:0H
7 ? 00:00:36 rcu_sched
8 ? 00:00:00 rcu_bh
9 ? 00:00:15 rcuos/0
10 ? 00:00:00 rcuob/0
11 ? 00:00:00 migration/0
12 ? 00:00:00 watchdog/0
13 ? 00:00:00 watchdog/1
14 ? 00:00:00 migration/1
15 ? 00:00:00 ksoftirqd/1
17 ? 00:00:00 kworker/1:0H
18 ? 00:00:06 rcuos/1
19 ? 00:00:00 rcuob/1
20 ? 00:00:00 watchdog/2
21 ? 00:00:00 migration/2
22 ? 00:00:00 ksoftirqd/2
24 ? 00:00:00 kworker/2:0H
25 ? 00:00:14 rcuos/2
26 ? 00:00:00 rcuob/2
27 ? 00:00:00 watchdog/3
28 ? 00:00:00 migration/3
29 ? 00:00:00 ksoftirqd/3
31 ? 00:00:00 kworker/3:0H
32 ? 00:00:05 rcuos/3
33 ? 00:00:00 rcuob/3
34 ? 00:00:00 kdevtmpfs
35 ? 00:00:00 netns
36 ? 00:00:00 writeback
37 ? 00:00:00 ksmd
38 ? 00:00:00 khugepaged
39 ? 00:00:00 crypto
40 ? 00:00:00 kintegrityd
41 ? 00:00:00 bioset
42 ? 00:00:00 kblockd
43 ? 00:00:00 ata_sff
44 ? 00:00:00 md
45 ? 00:00:00 devfreq_wq
51 ? 00:00:13 kswapd0
52 ? 00:00:00 vmstat
93 ? 00:00:00 kthrotld
94 ? 00:00:00 acpi_thermal_pm
95 ? 00:00:00 scsi_eh_0
96 ? 00:00:00 scsi_tmf_0
97 ? 00:00:00 scsi_eh_1
98 ? 00:00:00 scsi_tmf_1
99 ? 00:00:00 scsi_eh_2
100 ? 00:00:00 scsi_tmf_2
104 ? 00:00:00 kpsmoused
106 ? 00:00:00 dm_bufio_cache
107 ? 00:00:00 ipv6_addrconf
138 ? 00:00:00 deferwq
140 ? 00:00:00 bioset
178 ? 00:00:00 kauditd
389 ? 00:00:01 kworker/0:1H
399 ? 00:00:00 kworker/1:1H
400 ? 00:00:00 kworker/3:1H
420 ? 00:00:00 rtsx_pci_sdmmc_
425 ? 00:00:00 kworker/2:1H
613 ? 00:00:00 kdmflush
621 ? 00:00:00 bioset
622 ? 00:00:00 kcryptd_io
623 ? 00:00:00 kcryptd
624 ? 00:00:08 dmcrypt_write
625 ? 00:00:00 bioset
688 ? 00:00:00 kdmflush
691 ? 00:00:00 bioset
700 ? 00:00:00 kdmflush
701 ? 00:00:00 bioset
724 ? 00:00:00 jbd2/dm-1-8
725 ? 00:00:00 ext4-rsv-conver
823 ? 00:00:02 systemd-journal
861 ? 00:00:00 rpciod
880 ? 00:00:00 systemd-udevd
910 ? 00:00:00 ktpacpid
918 ? 00:00:00 irq/47-mei_me
949 ? 00:00:00 kmemstick
968 ? 00:00:00 cfg80211
984 ? 00:00:07 irq/50-iwlwifi
999 ? 00:00:00 kworker/u17:0
1001 ? 00:00:00 hci0
1002 ? 00:00:00 hci0
1007 ? 00:00:00 kworker/u17:2
1015 ? 00:00:00 kdmflush
1016 ? 00:00:00 bioset
1018 ? 00:00:00 kvm-irqfd-clean
1071 ? 00:00:00 jbd2/sda1-8
1072 ? 00:00:00 ext4-rsv-conver
1077 ? 00:00:06 jbd2/dm-3-8
1078 ? 00:00:00 ext4-rsv-conver
1105 ? 00:00:00 auditd
1119 ? 00:00:00 audispd
1123 ? 00:00:00 sedispatch
1133 ? 00:00:00 bluetoothd
1137 ? 00:00:00 mcelog
1139 ? 00:00:39 rngd
1146 ? 00:00:00 gssproxy
1154 ? 00:00:00 ModemManager
1158 ? 00:00:04 udisksd
1159 ? 00:00:00 firewalld
1169 ? 00:00:00 systemd-logind
1173 ? 00:00:00 accounts-daemon
1177 ? 00:00:00 alsactl
1249 ? 00:00:00 abrtd
1262 ? 00:00:00 iprt-VBoxWQueue
1268 ? 00:00:00 iprt-VBoxTscThr
1281 ? 00:00:00 abrt-dump-journ
1286 ? 00:00:00 abrt-dump-journ
1324 ? 00:00:12 NetworkManager
1376 ? 00:00:00 libvirtd
1393 ? 00:00:00 crond
1396 ? 00:00:00 atd
1398 ? 00:00:00 gdm
1463 ? 00:00:01 wpa_supplicant
1509 ? 00:00:00 gdm-session-wor
1582 ? 00:00:00 upowerd
1680 ? 00:00:10 packagekitd
1839 ? 00:00:00 dhclient
1903 ? 00:00:00 gdm-session-wor
1938 tty2 00:00:00 xf86-video-inte
2067 ? 00:00:00 krfcommd
2206 ? 00:00:00 cupsd
2235 ? 00:00:00 fwupd
2448 ? 00:00:00 dhclient
2633 ? 00:00:00 kworker/0:1
3387 ? 00:00:00 kworker/3:1
3927 ? 00:00:04 kworker/u16:0
5168 ? 00:00:00 kworker/1:3
13278 ? 00:00:00 kworker/u16:5
13551 ? 00:00:00 kworker/u16:1
13663 ? 00:00:00 kworker/1:1
13668 ? 00:00:00 kworker/3:0
13681 ? 00:00:00 kworker/2:2
13694 ? 00:00:00 kworker/0:3
13724 ? 00:00:00 kworker/u16:4
13765 ? 00:00:00 kworker/1:2
13781 ? 00:00:00 kworker/2:0
13794 ? 00:00:00 kworker/3:3
13799 ? 00:00:00 kworker/0:2
14835 pts/1 00:00:00 sudo
14848 pts/1 00:00:00 su
14853 pts/1 00:00:00 bash
14902 ? 00:00:00 kworker/2:3
14903 pts/1 00:00:00 python
15764 ? 00:00:00 fprintd
16120 ? 00:00:00 kworker/2:1
16419 pts/1 00:00:00 ps
18809 ? 00:00:00 ceph-msgr
18811 ? 00:00:00 rbd
28322 ? 00:00:00 systemd
28332 ? 00:00:00 (sd-pam)
28799 ? 00:00:10 kworker/u16:2
29866 ? 00:00:00 kworker/3:2
30529 ? 00:00:00 dio/dm-3
31742 ? 00:00:00 kworker/0:0
32711 ? 00:00:00 kworker/1:0
2016-07-14 21:49:50,497.497 INFO:__main__:run args=['./bin/./ceph-mds', '-i', 'a']
2016-07-14 21:49:50,497.497 INFO:__main__:Running ['./bin/./ceph-mds', '-i', 'a']
2016-07-14 21:49:50,532.532 INFO:__main__:run args=['./bin/ceph', 'fs', 'get', 'cephfs', '--format=json-pretty']
2016-07-14 21:49:50,533.533 INFO:__main__:Running ['./bin/ceph', 'fs', 'get', 'cephfs', '--format=json-pretty']
2016-07-14 21:49:50,736.736 INFO:tasks.cephfs.filesystem:are_daemons_healthy: mds map: {u'session_autoclose': 300, u'up': {u'mds_0': 4147}, u'last_failure_osd_epoch': 0, u'in': [0], u'last_failure': 0, u'max_file_size': 1099511627776, u'explicitly_allowed_features': 0, u'damaged': [], u'tableserver': 0, u'metadata_pool': 5, u'failed': [], u'epoch': 15, u'flags': 0, u'max_mds': 1, u'compat': {u'compat': {}, u'ro_compat': {}, u'incompat': {u'feature_8': u'file layout v2', u'feature_2': u'client writeable ranges', u'feature_3': u'default file layouts on dirs', u'feature_1': u'base v0.20', u'feature_6': u'dirfrag is stored in omap', u'feature_4': u'dir inode in separate object', u'feature_5': u'mds uses versioned encoding'}}, u'data_pools': [6], u'info': {u'gid_4147': {u'standby_for_rank': -1, u'export_targets': [], u'name': u'a', u'incarnation': 15, u'standby_replay': False, u'state_seq': 1, u'standby_for_fscid': -1, u'state': u'up:creating', u'gid': 4147, u'features': 576460752032890879, u'rank': 0, u'standby_for_name': u'', u'addr': u'10.70.1.141:6812/15180'}}, u'fs_name': u'cephfs', u'created': u'2016-07-14 21:49:50.110782', u'enabled': True, u'modified': u'2016-07-14 21:49:50.110782', u'session_timeout': 60, u'stopped': [], u'ever_allowed_features': 0, u'root': 0}
2016-07-14 21:49:50,736.736 WARNING:tasks.cephfs.filesystem:Unhealthy mds state gid_4147:up:creating
2016-07-14 21:49:51,736.736 INFO:__main__:run args=['./bin/ceph', 'fs', 'get', 'cephfs', '--format=json-pretty']
2016-07-14 21:49:51,737.737 INFO:__main__:Running ['./bin/ceph', 'fs', 'get', 'cephfs', '--format=json-pretty']
2016-07-14 21:49:51,921.921 INFO:tasks.cephfs.filesystem:are_daemons_healthy: mds map: {u'session_autoclose': 300, u'up': {u'mds_0': 4147}, u'last_failure_osd_epoch': 0, u'in': [0], u'last_failure': 0, u'max_file_size': 1099511627776, u'explicitly_allowed_features': 0, u'damaged': [], u'tableserver': 0, u'metadata_pool': 5, u'failed': [], u'epoch': 15, u'flags': 0, u'max_mds': 1, u'compat': {u'compat': {}, u'ro_compat': {}, u'incompat': {u'feature_8': u'file layout v2', u'feature_2': u'client writeable ranges', u'feature_3': u'default file layouts on dirs', u'feature_1': u'base v0.20', u'feature_6': u'dirfrag is stored in omap', u'feature_4': u'dir inode in separate object', u'feature_5': u'mds uses versioned encoding'}}, u'data_pools': [6], u'info': {u'gid_4147': {u'standby_for_rank': -1, u'export_targets': [], u'name': u'a', u'incarnation': 15, u'standby_replay': False, u'state_seq': 1, u'standby_for_fscid': -1, u'state': u'up:creating', u'gid': 4147, u'features': 576460752032890879, u'rank': 0, u'standby_for_name': u'', u'addr': u'10.70.1.141:6812/15180'}}, u'fs_name': u'cephfs', u'created': u'2016-07-14 21:49:50.110782', u'enabled': True, u'modified': u'2016-07-14 21:49:50.110782', u'session_timeout': 60, u'stopped': [], u'ever_allowed_features': 0, u'root': 0}
2016-07-14 21:49:51,921.921 WARNING:tasks.cephfs.filesystem:Unhealthy mds state gid_4147:up:creating
2016-07-14 21:49:52,922.922 INFO:__main__:run args=['./bin/ceph', 'fs', 'get', 'cephfs', '--format=json-pretty']
2016-07-14 21:49:52,922.922 INFO:__main__:Running ['./bin/ceph', 'fs', 'get', 'cephfs', '--format=json-pretty']
2016-07-14 21:49:53,106.106 INFO:tasks.cephfs.filesystem:are_daemons_healthy: mds map: {u'session_autoclose': 300, u'up': {u'mds_0': 4147}, u'last_failure_osd_epoch': 0, u'in': [0], u'last_failure': 0, u'max_file_size': 1099511627776, u'explicitly_allowed_features': 0, u'damaged': [], u'tableserver': 0, u'metadata_pool': 5, u'failed': [], u'epoch': 16, u'flags': 0, u'max_mds': 1, u'compat': {u'compat': {}, u'ro_compat': {}, u'incompat': {u'feature_8': u'file layout v2', u'feature_2': u'client writeable ranges', u'feature_3': u'default file layouts on dirs', u'feature_1': u'base v0.20', u'feature_6': u'dirfrag is stored in omap', u'feature_4': u'dir inode in separate object', u'feature_5': u'mds uses versioned encoding'}}, u'data_pools': [6], u'info': {u'gid_4147': {u'standby_for_rank': -1, u'export_targets': [], u'name': u'a', u'incarnation': 15, u'standby_replay': False, u'state_seq': 4, u'standby_for_fscid': -1, u'state': u'up:active', u'gid': 4147, u'features': 576460752032890879, u'rank': 0, u'standby_for_name': u'', u'addr': u'10.70.1.141:6812/15180'}}, u'fs_name': u'cephfs', u'created': u'2016-07-14 21:49:50.110782', u'enabled': True, u'modified': u'2016-07-14 21:49:50.110782', u'session_timeout': 60, u'stopped': [], u'ever_allowed_features': 0, u'root': 0}
2016-07-14 21:49:53,106.106 INFO:tasks.cephfs.filesystem:are_daemons_healthy: 1/1
2016-07-14 21:49:53,106.106 INFO:__main__:run args=['./bin/ceph', 'daemon', 'mds.a', 'status']
2016-07-14 21:49:53,107.107 INFO:__main__:Running ['./bin/ceph', 'daemon', 'mds.a', 'status']
2016-07-14 21:49:53,223.223 INFO:tasks.cephfs.filesystem:_json_asok output: {
"cluster_fsid": "ae66e242-d103-408a-b4cf-f160f95d365c",
"whoami": 0,
"want_state": "up:active",
"state": "up:active",
"mdsmap_epoch": 16,
"osdmap_epoch": 24,
"osdmap_epoch_barrier": 24
}

2016-07-14 21:49:53,224.224 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:53,224.224 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.0': No such file or directory
2016-07-14 21:49:53,237.237 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.0
2016-07-14 21:49:53,237.237 INFO:__main__:run args=['mkdir', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:53,238.238 INFO:__main__:Running ['mkdir', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:53,247.247 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:53,247.247 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:53,263.263 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:53,264.264 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:53,275.275 INFO:__main__:Pre-mount connections: [43, 47]
2016-07-14 21:49:53,275.275 INFO:__main__:run args=['./bin/ceph-fuse', '-f', '--name', 'client.0', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:53,275.275 INFO:__main__:Running ['./bin/ceph-fuse', '-f', '--name', 'client.0', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:53,282.282 INFO:__main__:Mounting client.0 with pid 16513
2016-07-14 21:49:53,283.283 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:53,283.283 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:53,297.297 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:53,297.297 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:54,312.312 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:54,313.313 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:54,321.321 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:54,321.321 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:54,332.332 INFO:__main__:Post-mount connections: [43, 47, 48]
2016-07-14 21:49:54,333.333 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:54,334.334 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:54,343.343 INFO:tasks.cephfs.fuse_mount:ceph-fuse is mounted on /tmp/tmp0EnRmD/mnt.0
2016-07-14 21:49:54,343.343 INFO:__main__:run args=['sudo', 'chmod', '1777', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:54,344.344 INFO:__main__:Running ['chmod', '1777', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:49:54,357.357 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:54,357.357 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.1': No such file or directory
2016-07-14 21:49:54,364.364 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.1
2016-07-14 21:49:54,364.364 INFO:__main__:run args=['mkdir', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:54,365.365 INFO:__main__:Running ['mkdir', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:54,372.372 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:54,373.373 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:54,388.388 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:54,388.388 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:54,396.396 INFO:__main__:Pre-mount connections: [43, 47, 48]
2016-07-14 21:49:54,396.396 INFO:__main__:run args=['./bin/ceph-fuse', '-f', '--name', 'client.1', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:54,396.396 INFO:__main__:Running ['./bin/ceph-fuse', '-f', '--name', 'client.1', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:54,408.408 INFO:__main__:Mounting client.1 with pid 16555
2016-07-14 21:49:54,410.410 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:54,411.411 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:54,426.426 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:54,427.427 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:55,440.440 INFO:__main__:run args=['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
2016-07-14 21:49:55,440.440 INFO:__main__:Running ['mount', '-t', 'fusectl', '/sys/fs/fuse/connections', '/sys/fs/fuse/connections']
mount: /sys/fs/fuse/connections is already mounted or /sys/fs/fuse/connections busy
/sys/fs/fuse/connections is already mounted on /sys/fs/fuse/connections
2016-07-14 21:49:55,450.450 INFO:__main__:run args=['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:55,452.452 INFO:__main__:Running ['ls', '/sys/fs/fuse/connections']
2016-07-14 21:49:55,461.461 INFO:__main__:Post-mount connections: [43, 47, 48, 49]
2016-07-14 21:49:55,461.461 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:55,461.461 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:55,476.476 INFO:tasks.cephfs.fuse_mount:ceph-fuse is mounted on /tmp/tmp0EnRmD/mnt.1
2016-07-14 21:49:55,476.476 INFO:__main__:run args=['sudo', 'chmod', '1777', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:55,476.476 INFO:__main__:Running ['chmod', '1777', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:55,487.487 INFO:__main__:Searching for existing instance mon pg warn max per osd/global
2016-07-14 21:49:55,488.488 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:55,489.489 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:55,497.497 INFO:tasks.cephfs.fuse_mount:ceph-fuse is mounted on /tmp/tmp0EnRmD/mnt.1
2016-07-14 21:49:55,497.497 INFO:tasks.cephfs.fuse_mount:Running fusermount -u on local...
2016-07-14 21:49:55,498.498 INFO:__main__:run args=['sudo', 'fusermount', '-u', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:55,498.498 INFO:__main__:Running ['fusermount', '-u', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:55,524.524 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:55,524.524 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:49:55,534.534 INFO:teuthology.orchestra.run:waiting for 900
2016-07-14 21:50:01,538.538 INFO:__main__:run args=['rmdir', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:50:01,539.539 INFO:__main__:Running ['rmdir', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:50:01,546.546 INFO:__main__:run args=['./bin/ceph', 'auth', 'get-or-create', 'client.manila', 'mds', 'allow *', 'osd', 'allow rw', 'mon', 'allow *']
2016-07-14 21:50:01,546.546 INFO:__main__:Running ['./bin/ceph', 'auth', 'get-or-create', 'client.manila', 'mds', 'allow *', 'osd', 'allow rw', 'mon', 'allow *']
2016-07-14 21:50:01,739.739 INFO:__main__:run args=['sudo', 'python', '-c', 'import shutil, sys; shutil.copyfileobj(sys.stdin, file(sys.argv[1], "wb"))', '/home/rraja/git/ceph/build/client.manila.keyring']
2016-07-14 21:50:01,739.739 INFO:__main__:Running ['python', '-c', 'import shutil, sys; shutil.copyfileobj(sys.stdin, file(sys.argv[1], "wb"))', '/home/rraja/git/ceph/build/client.manila.keyring']
2016-07-14 21:50:01,763.763 INFO:__main__:Searching for existing instance mon pg warn max per osd/global
2016-07-14 21:50:01,763.763 INFO:__main__:Searching for existing instance keyring/client.manila
2016-07-14 21:50:01,764.764 INFO:__main__:run args=['./bin/ceph', 'osd', 'dump', '--format=json-pretty']
2016-07-14 21:50:01,764.764 INFO:__main__:Running ['./bin/ceph', 'osd', 'dump', '--format=json-pretty']
2016-07-14 21:50:01,940.940 INFO:__main__:run args=['./bin/ceph', 'daemon', 'mon.a', 'config', 'get', 'mon_pg_warn_max_per_osd']
2016-07-14 21:50:01,940.940 INFO:__main__:Running ['./bin/ceph', 'daemon', 'mon.a', 'config', 'get', 'mon_pg_warn_max_per_osd']
2016-07-14 21:50:02,048.048 INFO:tasks.cephfs.filesystem:_json_asok output: {
"mon_pg_warn_max_per_osd": "300"
}

2016-07-14 21:50:02,048.048 INFO:tasks.cephfs.test_volume_client:max_per_osd 300
2016-07-14 21:50:02,048.048 INFO:tasks.cephfs.test_volume_client:osd_count 3
2016-07-14 21:50:02,048.048 INFO:tasks.cephfs.test_volume_client:max_overall 900
2016-07-14 21:50:02,049.049 INFO:tasks.cephfs.test_volume_client:existing_pg_count 26
2016-07-14 21:50:02,049.049 INFO:tasks.cephfs.test_volume_client:expected_pg_num 87
2016-07-14 21:50:02,049.049 INFO:__main__:run args=['./bin/ceph', 'osd', 'dump', '--format=json-pretty']
2016-07-14 21:50:02,049.049 INFO:__main__:Running ['./bin/ceph', 'osd', 'dump', '--format=json-pretty']
2016-07-14 21:50:02,234.234 INFO:__main__:run args=['python', '-c', '\nfrom ceph_volume_client import CephFSVolumeClient, VolumePath\nimport logging\nlog = logging.getLogger("ceph_volume_client")\nlog.addHandler(logging.StreamHandler())\nlog.setLevel(logging.DEBUG)\nvc = CephFSVolumeClient("manila", "./ceph.conf", "ceph", None, None)\nvc.connect()\n\nvp = VolumePath("grpid", "volid")\nvc.create_volume(vp, 10, data_isolated=True)\n\nvc.disconnect()\n ']
2016-07-14 21:50:02,235.235 INFO:__main__:Running ['python', '-c', '\nfrom ceph_volume_client import CephFSVolumeClient, VolumePath\nimport logging\nlog = logging.getLogger("ceph_volume_client")\nlog.addHandler(logging.StreamHandler())\nlog.setLevel(logging.DEBUG)\nvc = CephFSVolumeClient("manila", "./ceph.conf", "ceph", None, None)\nvc.connect()\n\nvp = VolumePath("grpid", "volid")\nvc.create_volume(vp, 10, data_isolated=True)\n\nvc.disconnect()\n ']
src/tcmalloc.cc:283] Attempt to free invalid pointer 0x561bc63d3940
2016-07-14 21:50:02,503.503 INFO:__main__:test_data_isolated (tasks.cephfs.test_volume_client.TestVolumeClient) ... ERROR
2016-07-14 21:50:02,504.504 ERROR:__main__:Traceback (most recent call last):
File "/home/rraja/git/ceph-qa-suite/tasks/cephfs/test_volume_client.py", line 370, in test_data_isolated
guest_entity=guest_entity
File "/home/rraja/git/ceph-qa-suite/tasks/cephfs/test_volume_client.py", line 34, in _volume_client_python
""".format(payload=script, conf_path=client.config_path, vol_prefix=vol_prefix, ns_prefix=ns_prefix))
File "/home/rraja/git/ceph-qa-suite/tasks/cephfs/mount.py", line 134, in run_python
p.wait()
File "/home/rraja/git/ceph-qa-suite/tasks/cephfs/vstart_runner.py", line 113, in wait
raise CommandFailedError(self.args, self.exitstatus)
CommandFailedError: Command failed with status -6: ['python', '-c', '\nfrom ceph_volume_client import CephFSVolumeClient, VolumePath\nimport logging\nlog = logging.getLogger("ceph_volume_client")\nlog.addHandler(logging.StreamHandler())\nlog.setLevel(logging.DEBUG)\nvc = CephFSVolumeClient("manila", "./ceph.conf", "ceph", None, None)\nvc.connect()\n\nvp = VolumePath("grpid", "volid")\nvc.create_volume(vp, 10, data_isolated=True)\n\nvc.disconnect()\n ']

2016-07-14 21:50:02,504.504 ERROR:__main__:Error in test 'test_data_isolated (tasks.cephfs.test_volume_client.TestVolumeClient)', going interactive
Ceph test interactive mode, use ctx to interact with the cluster, press control-D to exit...
>>>
2016-07-14 21:56:08,890.890 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:56:08,890.890 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:56:08,901.901 INFO:tasks.cephfs.fuse_mount:ceph-fuse is mounted on /tmp/tmp0EnRmD/mnt.0
2016-07-14 21:56:08,901.901 INFO:tasks.cephfs.fuse_mount:Running fusermount -u on local...
2016-07-14 21:56:08,901.901 INFO:__main__:run args=['sudo', 'fusermount', '-u', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:56:08,901.901 INFO:__main__:Running ['fusermount', '-u', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:56:08,920.920 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:56:08,920.920 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:56:08,932.932 INFO:__main__:kill
2016-07-14 21:56:08,932.932 INFO:__main__:kill: killing pid 16513 (['./bin/ceph-fuse', '-f', '--name', 'client.0', '/tmp/tmp0EnRmD/mnt.0'])
ceph-fuse[16513]: starting ceph client
2016-07-14 21:49:53.297548 7facfb952f40 -1 WARNING: the following dangerous and experimental features are enabled: *
2016-07-14 21:49:53.297636 7facfb952f40 -1 WARNING: the following dangerous and experimental features are enabled: *
2016-07-14 21:49:53.303164 7facfb952f40 -1 WARNING: the following dangerous and experimental features are enabled: *
2016-07-14 21:49:53.303889 7facfb952f40 -1 init, newargv = 0xa149080 newargc=11
ceph-fuse[16513]: starting fuse
ceph-fuse[16513]: fuse finished with error 0 and tester_r 0
2016-07-14 21:56:08,936.936 INFO:__main__:run args=['rm', '-rf', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:56:08,936.936 INFO:__main__:Running ['rm', '-rf', '/tmp/tmp0EnRmD/mnt.0']
2016-07-14 21:56:08,945.945 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:56:08,945.945 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.1']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.1': No such file or directory
2016-07-14 21:56:08,956.956 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.1
2016-07-14 21:56:08,956.956 INFO:__main__:run args=['rm', '-rf', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:56:08,956.956 INFO:__main__:Running ['rm', '-rf', '/tmp/tmp0EnRmD/mnt.1']
2016-07-14 21:56:08,968.968 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.2']
2016-07-14 21:56:08,969.969 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.2']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.2': No such file or directory
2016-07-14 21:56:08,976.976 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.2
2016-07-14 21:56:08,977.977 INFO:__main__:run args=['rm', '-rf', '/tmp/tmp0EnRmD/mnt.2']
2016-07-14 21:56:08,977.977 INFO:__main__:Running ['rm', '-rf', '/tmp/tmp0EnRmD/mnt.2']
2016-07-14 21:56:08,985.985 INFO:__main__:run args=['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.3']
2016-07-14 21:56:08,985.985 INFO:__main__:Running ['stat', '--file-system', '--printf=%T\n', '--', '/tmp/tmp0EnRmD/mnt.3']
stat: cannot read file system information for '/tmp/tmp0EnRmD/mnt.3': No such file or directory
2016-07-14 21:56:08,995.995 INFO:tasks.cephfs.fuse_mount:mount point does not exist: /tmp/tmp0EnRmD/mnt.3
2016-07-14 21:56:08,995.995 INFO:__main__:run args=['rm', '-rf', '/tmp/tmp0EnRmD/mnt.3']
2016-07-14 21:56:08,995.995 INFO:__main__:Running ['rm', '-rf', '/tmp/tmp0EnRmD/mnt.3']
2016-07-14 21:56:09,006.006 INFO:__main__:Searching for existing instance mon pg warn max per osd/global
2016-07-14 21:56:09,007.007 INFO:__main__:Stopped test: test_data_isolated (tasks.cephfs.test_volume_client.TestVolumeClient) in 388.115495s
2016-07-14 21:56:09,007.007 INFO:__main__:
2016-07-14 21:56:09,007.007 INFO:__main__:======================================================================
2016-07-14 21:56:09,007.007 INFO:__main__:ERROR: test_data_isolated (tasks.cephfs.test_volume_client.TestVolumeClient)
2016-07-14 21:56:09,007.007 INFO:__main__:----------------------------------------------------------------------
2016-07-14 21:56:09,007.007 INFO:__main__:Traceback (most recent call last):
2016-07-14 21:56:09,007.007 INFO:__main__: File "/home/rraja/git/ceph-qa-suite/tasks/cephfs/test_volume_client.py", line 370, in test_data_isolated
2016-07-14 21:56:09,007.007 INFO:__main__: guest_entity=guest_entity
2016-07-14 21:56:09,007.007 INFO:__main__: File "/home/rraja/git/ceph-qa-suite/tasks/cephfs/test_volume_client.py", line 34, in _volume_client_python
2016-07-14 21:56:09,008.008 INFO:__main__: """.format(payload=script, conf_path=client.config_path, vol_prefix=vol_prefix, ns_prefix=ns_prefix))
2016-07-14 21:56:09,008.008 INFO:__main__: File "/home/rraja/git/ceph-qa-suite/tasks/cephfs/mount.py", line 134, in run_python
2016-07-14 21:56:09,008.008 INFO:__main__: p.wait()
2016-07-14 21:56:09,008.008 INFO:__main__: File "/home/rraja/git/ceph-qa-suite/tasks/cephfs/vstart_runner.py", line 113, in wait
2016-07-14 21:56:09,008.008 INFO:__main__: raise CommandFailedError(self.args, self.exitstatus)
2016-07-14 21:56:09,008.008 INFO:__main__:CommandFailedError: Command failed with status -6: ['python', '-c', '\nfrom ceph_volume_client import CephFSVolumeClient, VolumePath\nimport logging\nlog = logging.getLogger("ceph_volume_client")\nlog.addHandler(logging.StreamHandler())\nlog.setLevel(logging.DEBUG)\nvc = CephFSVolumeClient("manila", "./ceph.conf", "ceph", None, None)\nvc.connect()\n\nvp = VolumePath("grpid", "volid")\nvc.create_volume(vp, 10, data_isolated=True)\n\nvc.disconnect()\n ']
2016-07-14 21:56:09,008.008 INFO:__main__:
2016-07-14 21:56:09,008.008 INFO:__main__:----------------------------------------------------------------------
2016-07-14 21:56:09,008.008 INFO:__main__:Ran 2 tests in 422.808s
2016-07-14 21:56:09,008.008 INFO:__main__:
2016-07-14 21:56:09,008.008 INFO:__main__:FAILED (errors=1)
2016-07-14 21:56:09,008.008 INFO:__main__:
2016-07-14 21:56:09,008.008 INFO:__main__:======================================================================
2016-07-14 21:56:09,008.008 INFO:__main__:ERROR: test_data_isolated (tasks.cephfs.test_volume_client.TestVolumeClient)
2016-07-14 21:56:09,009.009 INFO:__main__:----------------------------------------------------------------------
2016-07-14 21:56:09,009.009 INFO:__main__:Traceback (most recent call last):
2016-07-14 21:56:09,009.009 INFO:__main__: File "/home/rraja/git/ceph-qa-suite/tasks/cephfs/test_volume_client.py", line 370, in test_data_isolated
2016-07-14 21:56:09,009.009 INFO:__main__: guest_entity=guest_entity
2016-07-14 21:56:09,009.009 INFO:__main__: File "/home/rraja/git/ceph-qa-suite/tasks/cephfs/test_volume_client.py", line 34, in _volume_client_python
2016-07-14 21:56:09,009.009 INFO:__main__: """.format(payload=script, conf_path=client.config_path, vol_prefix=vol_prefix, ns_prefix=ns_prefix))
2016-07-14 21:56:09,009.009 INFO:__main__: File "/home/rraja/git/ceph-qa-suite/tasks/cephfs/mount.py", line 134, in run_python
2016-07-14 21:56:09,009.009 INFO:__main__: p.wait()
2016-07-14 21:56:09,009.009 INFO:__main__: File "/home/rraja/git/ceph-qa-suite/tasks/cephfs/vstart_runner.py", line 113, in wait
2016-07-14 21:56:09,009.009 INFO:__main__: raise CommandFailedError(self.args, self.exitstatus)
2016-07-14 21:56:09,009.009 INFO:__main__:CommandFailedError: Command failed with status -6: ['python', '-c', '\nfrom ceph_volume_client import CephFSVolumeClient, VolumePath\nimport logging\nlog = logging.getLogger("ceph_volume_client")\nlog.addHandler(logging.StreamHandler())\nlog.setLevel(logging.DEBUG)\nvc = CephFSVolumeClient("manila", "./ceph.conf", "ceph", None, None)\nvc.connect()\n\nvp = VolumePath("grpid", "volid")\nvc.create_volume(vp, 10, data_isolated=True)\n\nvc.disconnect()\n ']
2016-07-14 21:56:09,009.009 INFO:__main__:

    (1-1/1)