Bug #16729 » test.log

Kefu Chai, 07/19/2016 01:16 AM

 
142/142 Test #104: rados-striper.sh ........................***Failed 300.67 sec
Environment Variables Already Set
+ PS4='${BASH_SOURCE[0]}:$LINENO: ${FUNCNAME[0]}: '
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:1447: main: export PATH=/tmp/ceph.Ry3/ceph-disk-virtualenv/bin:/tmp/ceph.Ry3/ceph-detect-init-virtualenv/bin:.:/home/jenkins-build/build/workspace/ceph-pull-requests/build/bin:/home/jenkins-build/build/workspace/ceph-pull-requests/src:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:1447: main: PATH=/tmp/ceph.Ry3/ceph-disk-virtualenv/bin:/tmp/ceph.Ry3/ceph-detect-init-virtualenv/bin:.:/home/jenkins-build/build/workspace/ceph-pull-requests/build/bin:/home/jenkins-build/build/workspace/ceph-pull-requests/src:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:1450: main: export CEPH_CONF=/dev/null
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:1450: main: CEPH_CONF=/dev/null
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:1451: main: unset CEPH_ARGS
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:1453: main: local code
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:1454: main: run testdir/rados-striper
/home/jenkins-build/build/workspace/ceph-pull-requests/src/test/libradosstriper/rados-striper.sh:21: run: local dir=testdir/rados-striper
/home/jenkins-build/build/workspace/ceph-pull-requests/src/test/libradosstriper/rados-striper.sh:22: run: shift
/home/jenkins-build/build/workspace/ceph-pull-requests/src/test/libradosstriper/rados-striper.sh:24: run: export CEPH_MON=127.0.0.1:7116
/home/jenkins-build/build/workspace/ceph-pull-requests/src/test/libradosstriper/rados-striper.sh:24: run: CEPH_MON=127.0.0.1:7116
/home/jenkins-build/build/workspace/ceph-pull-requests/src/test/libradosstriper/rados-striper.sh:25: run: export CEPH_ARGS
//home/jenkins-build/build/workspace/ceph-pull-requests/src/test/libradosstriper/rados-striper.sh:26: run: uuidgen
/home/jenkins-build/build/workspace/ceph-pull-requests/src/test/libradosstriper/rados-striper.sh:26: run: CEPH_ARGS+='--fsid=414e5204-fcfb-4176-ab89-aa7c684bd782 --auth-supported=none '
/home/jenkins-build/build/workspace/ceph-pull-requests/src/test/libradosstriper/rados-striper.sh:27: run: CEPH_ARGS+='--mon-host=127.0.0.1:7116 '
/home/jenkins-build/build/workspace/ceph-pull-requests/src/test/libradosstriper/rados-striper.sh:30: run: setup testdir/rados-striper
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:93: setup: local dir=testdir/rados-striper
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:94: setup: teardown testdir/rados-striper
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:118: teardown: local dir=testdir/rados-striper
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:119: teardown: kill_daemons testdir/rados-striper KILL
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:252: kill_daemons: shopt -q -o xtrace
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:252: kill_daemons: echo true
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:252: kill_daemons: local trace=true
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:253: kill_daemons: true
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:253: kill_daemons: shopt -u -o xtrace
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:269: kill_daemons: return 0
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:120: teardown: stat -f -c %T .
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:120: teardown: '[' xfs == btrfs ']'
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:123: teardown: rm -fr testdir/rados-striper
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:95: setup: mkdir -p testdir/rados-striper
/home/jenkins-build/build/workspace/ceph-pull-requests/src/test/libradosstriper/rados-striper.sh:33: run: run_mon testdir/rados-striper a
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:340: run_mon: local dir=testdir/rados-striper
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:341: run_mon: shift
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:342: run_mon: local id=a
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:343: run_mon: shift
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:344: run_mon: local data=testdir/rados-striper/a
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:347: run_mon: ceph-mon --id a --mkfs --mon-data=testdir/rados-striper/a --run-dir=testdir/rados-striper
ceph-mon: mon.noname-a 127.0.0.1:7116/0 is local, renaming to mon.a
ceph-mon: set fsid to 414e5204-fcfb-4176-ab89-aa7c684bd782
ceph-mon: created monfs at testdir/rados-striper/a for mon.a
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:354: run_mon: ceph-mon --id a --mon-osd-full-ratio=.99 --mon-data-avail-crit=1 --paxos-propose-interval=0.1 --osd-crush-chooseleaf-type=0 --erasure-code-dir=/home/jenkins-build/build/workspace/ceph-pull-requests/build/lib --plugin-dir=/home/jenkins-build/build/workspace/ceph-pull-requests/build/lib --debug-mon 20 --debug-ms 20 --debug-paxos 20 --chdir= --mon-data=testdir/rados-striper/a '--log-file=testdir/rados-striper/$name.log' '--admin-socket=testdir/rados-striper/$cluster-$name.asok' --mon-cluster-log-file=testdir/rados-striper/log --run-dir=testdir/rados-striper '--pid-file=testdir/rados-striper/$name.pid'
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:373: run_mon: cat
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:373: run_mon: get_config mon a fsid
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:754: get_config: local daemon=mon
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:755: get_config: local id=a
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:756: get_config: local config=fsid
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:758: get_config: CEPH_ARGS=
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:758: get_config: ceph --format xml daemon testdir/rados-striper/ceph-mon.a.asok config get fsid
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:761: get_config: xmlstarlet sel -t -m //fsid -v . -n
-:1.1: Start tag expected, '<' not found
admin_socket: exception getting command descriptions: [Errno 111] Connection ref
^
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:373: run_mon: get_config mon a mon_host
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:754: get_config: local daemon=mon
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:755: get_config: local id=a
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:756: get_config: local config=mon_host
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:761: get_config: xmlstarlet sel -t -m //mon_host -v . -n
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:758: get_config: CEPH_ARGS=
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:758: get_config: ceph --format xml daemon testdir/rados-striper/ceph-mon.a.asok config get mon_host
-:1.1: Start tag expected, '<' not found
admin_socket: exception getting command descriptions: [Errno 111] Connection ref
^
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:378: run_mon: get_config mon a mon_initial_members
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:754: get_config: local daemon=mon
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:755: get_config: local id=a
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:756: get_config: local config=mon_initial_members
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:758: get_config: CEPH_ARGS=
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:758: get_config: ceph --format xml daemon testdir/rados-striper/ceph-mon.a.asok config get mon_initial_members
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:761: get_config: xmlstarlet sel -t -m //mon_initial_members -v . -n
-:1.1: Start tag expected, '<' not found
admin_socket: exception getting command descriptions: [Errno 111] Connection ref
^
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:378: run_mon: test -z ''
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:379: run_mon: ceph osd pool delete rbd rbd --yes-i-really-really-mean-it
2016-07-18 19:48:50.558700 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa51805ab30 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa51805bde0).fault
2016-07-18 19:48:53.571260 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c000c80 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c001f30).fault
2016-07-18 19:48:56.564706 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c004fe0 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c006240).fault
2016-07-18 19:48:59.560102 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c003350 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c002350).fault
2016-07-18 19:49:02.560287 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0074a0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c0089e0).fault
2016-07-18 19:49:05.566868 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c003350 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c000c80).fault
2016-07-18 19:49:08.560964 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0074a0 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c001910).fault
2016-07-18 19:49:11.560969 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c008af0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c0014d0).fault
2016-07-18 19:49:14.567297 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0074a0 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00a7a0).fault
2016-07-18 19:49:17.563401 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c008af0 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c009d50).fault
2016-07-18 19:49:20.568222 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0074a0 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c0040e0).fault
2016-07-18 19:49:23.562304 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00abf0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00be50).fault
2016-07-18 19:49:26.563315 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0074a0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c0065b0).fault
2016-07-18 19:49:29.562744 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00abf0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c007100).fault
2016-07-18 19:49:32.562918 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0074a0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c0050c0).fault
2016-07-18 19:49:35.563443 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00abf0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c005d60).fault
2016-07-18 19:49:38.563680 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0074a0 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00d4a0).fault
2016-07-18 19:49:41.564038 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00abf0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00e080).fault
2016-07-18 19:49:44.564478 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0074a0 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00e4b0).fault
2016-07-18 19:49:47.565892 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00abf0 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00efb0).fault
2016-07-18 19:49:50.565001 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0074a0 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00e670).fault
2016-07-18 19:49:53.565318 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00abf0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c009210).fault
2016-07-18 19:49:56.565680 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0074a0 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c010780).fault
2016-07-18 19:49:59.566048 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00abf0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00c530).fault
2016-07-18 19:50:02.566947 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0074a0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00bf40).fault
2016-07-18 19:50:05.567101 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c010980 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011be0).fault
2016-07-18 19:50:08.567870 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0074a0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00f830).fault
2016-07-18 19:50:11.568080 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c010980 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011be0).fault
2016-07-18 19:50:14.568619 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0074a0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c0008c0).fault
2016-07-18 19:50:17.568944 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c010980 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011be0).fault
2016-07-18 19:50:20.569188 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c007400 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c004540).fault
2016-07-18 19:50:23.569495 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c010980 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011be0).fault
2016-07-18 19:50:26.571615 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c007400 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c002e60).fault
2016-07-18 19:50:29.572054 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c010980 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011be0).fault
2016-07-18 19:50:32.572563 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c007400 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c0032b0).fault
2016-07-18 19:50:35.572764 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c010980 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011be0).fault
2016-07-18 19:50:38.573088 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c007400 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c009db0).fault
2016-07-18 19:50:41.573692 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c010980 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011be0).fault
2016-07-18 19:50:44.574248 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c007400 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c008b70).fault
2016-07-18 19:50:47.577314 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c010980 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011be0).fault
2016-07-18 19:50:50.574486 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c007400 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c006030).fault
2016-07-18 19:50:53.574253 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c010980 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011be0).fault
2016-07-18 19:50:56.574525 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c007400 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c006360).fault
2016-07-18 19:50:59.575699 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c010980 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011be0).fault
2016-07-18 19:51:02.576571 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c007400 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00e160).fault
2016-07-18 19:51:05.576729 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c010980 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011be0).fault
2016-07-18 19:51:08.577320 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c007400 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00ee10).fault
2016-07-18 19:51:11.577895 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c010980 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011be0).fault
2016-07-18 19:51:14.578784 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c007400 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00eb60).fault
2016-07-18 19:51:17.578962 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c010850 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00e750).fault
2016-07-18 19:51:20.579356 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c007400 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c009ac0).fault
2016-07-18 19:51:23.580555 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0106e0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011940).fault
2016-07-18 19:51:26.580913 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c007400 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00cd60).fault
2016-07-18 19:51:29.581275 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0106e0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011940).fault
2016-07-18 19:51:32.581522 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c007400 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011fc0).fault
2016-07-18 19:51:35.581946 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0106e0 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011940).fault
2016-07-18 19:51:38.582208 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c007310 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c008570).fault
2016-07-18 19:51:41.582543 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0106e0 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011940).fault
2016-07-18 19:51:44.582875 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006750 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00fc50).fault
2016-07-18 19:51:47.583148 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0106e0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011940).fault
2016-07-18 19:51:50.583721 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006750 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c0019f0).fault
2016-07-18 19:51:53.584144 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0106e0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011940).fault
2016-07-18 19:51:56.584416 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006750 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c009f00).fault
2016-07-18 19:51:59.584778 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0106e0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011940).fault
2016-07-18 19:52:02.585128 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006750 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c002a60).fault
2016-07-18 19:52:05.585373 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0106e0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011940).fault
2016-07-18 19:52:08.585905 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006690 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c003a70).fault
2016-07-18 19:52:11.586231 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0106e0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011940).fault
2016-07-18 19:52:14.586622 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006510 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c007770).fault
2016-07-18 19:52:17.587127 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0106e0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011940).fault
2016-07-18 19:52:20.587470 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006510 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00d4f0).fault
2016-07-18 19:52:23.587908 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0106e0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011940).fault
2016-07-18 19:52:26.589064 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006510 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c005220).fault
2016-07-18 19:52:29.588731 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0106e0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011940).fault
2016-07-18 19:52:32.589110 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006510 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c005220).fault
2016-07-18 19:52:35.589533 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0106e0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011940).fault
2016-07-18 19:52:38.589956 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006510 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c0097b0).fault
2016-07-18 19:52:41.590413 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0106e0 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011940).fault
2016-07-18 19:52:44.590786 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006510 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c009410).fault
2016-07-18 19:52:47.591213 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00ab40 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011e80).fault
2016-07-18 19:52:50.591572 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006510 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00c300).fault
2016-07-18 19:52:53.592074 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00ab40 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c0134c0).fault
2016-07-18 19:52:56.592364 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006510 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c0117b0).fault
2016-07-18 19:52:59.592687 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00ab40 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011f90).fault
2016-07-18 19:53:02.593110 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006510 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00d410).fault
2016-07-18 19:53:05.593463 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00ab40 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011d40).fault
2016-07-18 19:53:08.593904 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006510 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00d410).fault
2016-07-18 19:53:11.594340 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00aa60 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011d40).fault
2016-07-18 19:53:14.594742 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006510 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00d410).fault
2016-07-18 19:53:17.595148 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00a860 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00a5f0).fault
2016-07-18 19:53:20.595455 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006510 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00d410).fault
2016-07-18 19:53:23.595955 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00a5f0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00fa10).fault
2016-07-18 19:53:26.596358 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006510 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c003630).fault
2016-07-18 19:53:29.596796 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00a3e0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c003fd0).fault
2016-07-18 19:53:32.597169 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0063e0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00bd80).fault
2016-07-18 19:53:35.597676 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00a3e0 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c003fd0).fault
2016-07-18 19:53:38.598096 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0063e0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c002250).fault
2016-07-18 19:53:41.598617 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00a3e0 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c003fd0).fault
2016-07-18 19:53:44.599174 7fa51c16d700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0062c0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c007520).fault
2016-07-18 19:53:47.599627 7fa516ffd700 0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00a3e0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c005450).fault
2016-07-18 19:53:50.558628 7fa5200ee700 0 monclient(hunting): authenticate timed out after 300
2016-07-18 19:53:50.558717 7fa5200ee700 0 librados: client.admin authentication error (110) Connection timed out
Error connecting to cluster: TimedOut
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:379: run_mon: return 1
/home/jenkins-build/build/workspace/ceph-pull-requests/src/test/libradosstriper/rados-striper.sh:33: run: return 1
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:1457: main: display_logs testdir/rados-striper
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:1318: display_logs: local dir=testdir/rados-striper
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:1320: display_logs: find testdir/rados-striper -maxdepth 1 -name '*.log'
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:1321: display_logs: read file
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:1322: display_logs: echo '======================= testdir/rados-striper/mon.a.log'
======================= testdir/rados-striper/mon.a.log
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:1323: display_logs: cat testdir/rados-striper/mon.a.log
2016-07-18 19:48:50.192869 7f0c083ea4c0 0 ceph version v11.0.0-674-g8aae974 (8aae9740fad1909e8d010f899e8ed88f672e1859), process ceph-mon, pid 22296
2016-07-18 19:48:50.223847 7f0c083ea4c0 10 load: jerasure load: lrc load: isa
2016-07-18 19:48:50.224092 7f0c083ea4c0 1 leveldb: Recovering log #3
2016-07-18 19:48:50.224123 7f0c083ea4c0 1 leveldb: Level-0 table #5: started
2016-07-18 19:48:50.228717 7f0c083ea4c0 1 leveldb: Level-0 table #5: 559 bytes OK
2016-07-18 19:48:50.230047 7f0c083ea4c0 1 leveldb: Delete type=0 #3

2016-07-18 19:48:50.230077 7f0c083ea4c0 1 leveldb: Delete type=3 #2

2016-07-18 19:48:50.230179 7f0c083ea4c0 10 obtain_monmap
2016-07-18 19:48:50.230195 7f0c083ea4c0 10 obtain_monmap found mkfs monmap
2016-07-18 19:48:50.230249 7f0c083ea4c0 0 starting mon.a rank 0 at 127.0.0.1:7116/0 mon_data testdir/rados-striper/a fsid 414e5204-fcfb-4176-ab89-aa7c684bd782
2016-07-18 19:48:50.230271 7f0c083ea4c0 10 -- :/0 rank.bind 127.0.0.1:7116/0
2016-07-18 19:48:50.230278 7f0c083ea4c0 10 accepter.accepter.bind
2016-07-18 19:48:50.230308 7f0c083ea4c0 10 accepter.accepter.bind bound to 127.0.0.1:7116/0
2016-07-18 19:48:50.230318 7f0c083ea4c0 1 -- 127.0.0.1:7116/0 learned my addr 127.0.0.1:7116/0
2016-07-18 19:48:50.230324 7f0c083ea4c0 1 accepter.accepter.bind my_inst.addr is 127.0.0.1:7116/0 need_addr=0
2016-07-18 19:48:50.230497 7f0c083ea4c0 1 mon.a@-1(probing) e0 preinit fsid 414e5204-fcfb-4176-ab89-aa7c684bd782
2016-07-18 19:48:50.230522 7f0c083ea4c0 10 mon.a@-1(probing) e0 check_fsid cluster_uuid contains '414e5204-fcfb-4176-ab89-aa7c684bd782'
2016-07-18 19:48:50.230534 7f0c083ea4c0 10 mon.a@-1(probing) e0 features compat={},rocompat={},incompat={1=initial feature set (~v.18),3=single paxos with k/v store (v0.?)}
2016-07-18 19:48:50.230541 7f0c083ea4c0 10 mon.a@-1(probing) e0 apply_compatset_features_to_quorum_requirements required_features 0
2016-07-18 19:48:50.230544 7f0c083ea4c0 10 mon.a@-1(probing) e0 required_features 0
2016-07-18 19:48:50.230551 7f0c083ea4c0 10 mon.a@-1(probing) e0 has_ever_joined = 0
2016-07-18 19:48:50.230559 7f0c083ea4c0 10 mon.a@-1(probing) e0 sync_last_committed_floor 0
2016-07-18 19:48:50.230564 7f0c083ea4c0 10 mon.a@-1(probing) e0 init_paxos
2016-07-18 19:48:50.230571 7f0c083ea4c0 10 mon.a@-1(probing).paxos(paxos recovering c 0..0) init last_pn: 0 accepted_pn: 0 last_committed: 0 first_committed: 0
2016-07-18 19:48:50.230577 7f0c083ea4c0 10 mon.a@-1(probing).paxos(paxos recovering c 0..0) init
2016-07-18 19:48:50.230591 7f0c083ea4c0 1 mon.a@-1(probing).mds e0 Unable to load 'last_metadata'
2016-07-18 19:48:50.230624 7f0c083ea4c0 10 mon.a@-1(probing) e0 refresh_from_paxos
2016-07-18 19:48:50.230632 7f0c083ea4c0 10 mon.a@-1(probing) e0 refresh_from_paxos no cluster_fingerprint
2016-07-18 19:48:50.230641 7f0c083ea4c0 10 mon.a@-1(probing).paxosservice(pgmap 0..0) refresh
2016-07-18 19:48:50.230652 7f0c083ea4c0 10 mon.a@-1(probing).paxosservice(mdsmap 0..0) refresh
2016-07-18 19:48:50.230659 7f0c083ea4c0 10 mon.a@-1(probing).paxosservice(osdmap 0..0) refresh
2016-07-18 19:48:50.230667 7f0c083ea4c0 10 mon.a@-1(probing).paxosservice(logm 0..0) refresh
2016-07-18 19:48:50.230671 7f0c083ea4c0 10 mon.a@-1(probing).log v0 update_from_paxos
2016-07-18 19:48:50.230673 7f0c083ea4c0 10 mon.a@-1(probing).log v0 update_from_paxos version 0 summary v 0
2016-07-18 19:48:50.230681 7f0c083ea4c0 10 mon.a@-1(probing).paxosservice(monmap 0..0) refresh
2016-07-18 19:48:50.230689 7f0c083ea4c0 10 mon.a@-1(probing).paxosservice(auth 0..0) refresh
2016-07-18 19:48:50.230693 7f0c083ea4c0 10 mon.a@-1(probing).auth v0 update_from_paxos
2016-07-18 19:48:50.230698 7f0c083ea4c0 10 mon.a@-1(probing).paxosservice(pgmap 0..0) post_refresh
2016-07-18 19:48:50.230700 7f0c083ea4c0 10 mon.a@-1(probing).pg v0 post_paxos_update
2016-07-18 19:48:50.230706 7f0c083ea4c0 10 mon.a@-1(probing).paxosservice(mdsmap 0..0) post_refresh
2016-07-18 19:48:50.230707 7f0c083ea4c0 10 mon.a@-1(probing).paxosservice(osdmap 0..0) post_refresh
2016-07-18 19:48:50.230708 7f0c083ea4c0 10 mon.a@-1(probing).paxosservice(logm 0..0) post_refresh
2016-07-18 19:48:50.230709 7f0c083ea4c0 10 mon.a@-1(probing).paxosservice(monmap 0..0) post_refresh
2016-07-18 19:48:50.230710 7f0c083ea4c0 10 mon.a@-1(probing).paxosservice(auth 0..0) post_refresh
2016-07-18 19:48:50.230712 7f0c083ea4c0 10 mon.a@-1(probing).health(0) init
2016-07-18 19:48:50.230780 7f0c083ea4c0 1 -- 127.0.0.1:7116/0 messenger.start
2016-07-18 19:48:50.230805 7f0c083ea4c0 2 mon.a@-1(probing) e0 init
2016-07-18 19:48:50.230825 7f0c083ea4c0 10 -- 127.0.0.1:7116/0 ready 127.0.0.1:7116/0
2016-07-18 19:48:50.230850 7f0c083ea4c0 1 accepter.accepter.start
2016-07-18 19:48:50.230862 7f0c083ea4c0 10 mon.a@-1(probing) e0 bootstrap
2016-07-18 19:48:50.230865 7f0c083ea4c0 10 mon.a@-1(probing) e0 sync_reset_requester
2016-07-18 19:48:50.230866 7f0c083ea4c0 10 mon.a@-1(probing) e0 unregister_cluster_logger - not registered
2016-07-18 19:48:50.230867 7f0c083ea4c0 10 mon.a@-1(probing) e0 cancel_probe_timeout (none scheduled)
2016-07-18 19:48:50.230867 7f0c083ea4c0 0 mon.a@-1(probing) e0 my rank is now 0 (was -1)
2016-07-18 19:48:50.230869 7f0c083ea4c0 1 -- 127.0.0.1:7116/0 mark_down_all
2016-07-18 19:48:50.230872 7f0c083ea4c0 10 mon.a@0(probing) e0 _reset
2016-07-18 19:48:50.230872 7f0c083ea4c0 10 mon.a@0(probing) e0 cancel_probe_timeout (none scheduled)
2016-07-18 19:48:50.230873 7f0c083ea4c0 10 mon.a@0(probing) e0 timecheck_finish
2016-07-18 19:48:50.230876 7f0c083ea4c0 15 mon.a@0(probing) e0 health_tick_stop
2016-07-18 19:48:50.230877 7f0c083ea4c0 15 mon.a@0(probing) e0 health_interval_stop
2016-07-18 19:48:50.230877 7f0c083ea4c0 10 mon.a@0(probing) e0 scrub_event_cancel
2016-07-18 19:48:50.230879 7f0c083ea4c0 10 mon.a@0(probing) e0 scrub_reset
2016-07-18 19:48:50.230880 7f0c083ea4c0 10 mon.a@0(probing).paxos(paxos recovering c 0..0) restart -- canceling timeouts
2016-07-18 19:48:50.230884 7f0c083ea4c0 10 mon.a@0(probing).paxosservice(pgmap 0..0) restart
2016-07-18 19:48:50.230894 7f0c083ea4c0 10 mon.a@0(probing).paxosservice(mdsmap 0..0) restart
2016-07-18 19:48:50.230895 7f0c083ea4c0 10 mon.a@0(probing).paxosservice(osdmap 0..0) restart
2016-07-18 19:48:50.230896 7f0c083ea4c0 10 mon.a@0(probing).paxosservice(logm 0..0) restart
2016-07-18 19:48:50.230897 7f0c083ea4c0 10 mon.a@0(probing).paxosservice(monmap 0..0) restart
2016-07-18 19:48:50.230897 7f0c083ea4c0 10 mon.a@0(probing).paxosservice(auth 0..0) restart
2016-07-18 19:48:50.230900 7f0c083ea4c0 1 mon.a@0(probing) e0 win_standalone_election
2016-07-18 19:48:50.230909 7f0c083ea4c0 1 mon.a@0(probing).elector(1) init, last seen epoch 1
2016-07-18 19:48:50.230911 7f0c083ea4c0 10 mon.a@0(probing).elector(1) bump_epoch 1 to 2
2016-07-18 19:48:50.231498 7f0c083ea4c0 10 mon.a@0(probing) e0 join_election
2016-07-18 19:48:50.231502 7f0c083ea4c0 10 mon.a@0(probing) e0 _reset
2016-07-18 19:48:50.231502 7f0c083ea4c0 10 mon.a@0(probing) e0 cancel_probe_timeout (none scheduled)
2016-07-18 19:48:50.231503 7f0c083ea4c0 10 mon.a@0(probing) e0 timecheck_finish
2016-07-18 19:48:50.231504 7f0c083ea4c0 15 mon.a@0(probing) e0 health_tick_stop
2016-07-18 19:48:50.231505 7f0c083ea4c0 15 mon.a@0(probing) e0 health_interval_stop
2016-07-18 19:48:50.231505 7f0c083ea4c0 10 mon.a@0(probing) e0 scrub_event_cancel
2016-07-18 19:48:50.231506 7f0c083ea4c0 10 mon.a@0(probing) e0 scrub_reset
2016-07-18 19:48:50.231507 7f0c083ea4c0 10 mon.a@0(probing).paxos(paxos recovering c 0..0) restart -- canceling timeouts
2016-07-18 19:48:50.231510 7f0c083ea4c0 10 mon.a@0(probing).paxosservice(pgmap 0..0) restart
2016-07-18 19:48:50.231511 7f0c083ea4c0 10 mon.a@0(probing).paxosservice(mdsmap 0..0) restart
2016-07-18 19:48:50.231512 7f0c083ea4c0 10 mon.a@0(probing).paxosservice(osdmap 0..0) restart
2016-07-18 19:48:50.231512 7f0c083ea4c0 10 mon.a@0(probing).paxosservice(logm 0..0) restart
2016-07-18 19:48:50.231513 7f0c083ea4c0 10 mon.a@0(probing).paxosservice(monmap 0..0) restart
2016-07-18 19:48:50.231514 7f0c083ea4c0 10 mon.a@0(probing).paxosservice(auth 0..0) restart
2016-07-18 19:48:50.231516 7f0c083ea4c0 10 mon.a@0(electing) e0 win_election epoch 2 quorum 0 features 576460752032890879
2016-07-18 19:48:50.231539 7f0c083ea4c0 0 log_channel(cluster) log [INF] : mon.a@0 won leader election with quorum 0
2016-07-18 19:48:50.231554 7f0c083ea4c0 1 -- 127.0.0.1:7116/0 --> 127.0.0.1:7116/0 -- log(1 entries from seq 1 at 2016-07-18 19:48:50.231541) v1 -- ?+0 0xb28cb40 con 0xb1c8800
2016-07-18 19:48:50.231561 7f0c083ea4c0 20 -- 127.0.0.1:7116/0 submit_message log(1 entries from seq 1 at 2016-07-18 19:48:50.231541) v1 local
2016-07-18 19:48:50.231567 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(monmap 0..0) election_finished
2016-07-18 19:48:50.231569 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(monmap 0..0) _active
2016-07-18 19:48:50.231570 7f0c083ea4c0 7 mon.a@0(leader).paxosservice(monmap 0..0) _active creating new pending
2016-07-18 19:48:50.231580 7f0c083ea4c0 10 mon.a@0(leader).monmap v0 create_pending monmap epoch 1
2016-07-18 19:48:50.231582 7f0c083ea4c0 10 mon.a@0(leader).monmap v0 create_initial using current monmap
2016-07-18 19:48:50.231583 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(monmap 0..0) propose_pending
2016-07-18 19:48:50.231603 7f0c083ea4c0 10 mon.a@0(leader).monmap v0 encode_pending epoch 1
2016-07-18 19:48:50.231637 7f0c083ea4c0 10 mon.a@0(leader) e0 prepare_new_fingerprint proposing cluster_fingerprint 087dfb0a-97f9-4757-a426-8f2e6120e1c0
2016-07-18 19:48:50.231642 7f0c083ea4c0 5 mon.a@0(leader).paxos(paxos active c 0..0) queue_pending_finisher 0xb18c180
2016-07-18 19:48:50.231646 7f0c083ea4c0 10 mon.a@0(leader).paxos(paxos active c 0..0) trigger_propose active, proposing now
2016-07-18 19:48:50.231659 7f0c083ea4c0 10 mon.a@0(leader).paxos(paxos active c 0..0) propose_pending 1 411 bytes
2016-07-18 19:48:50.231663 7f0c083ea4c0 10 mon.a@0(leader).paxos(paxos updating c 0..0) begin for 1 411 bytes
2016-07-18 19:48:50.232058 7f0c083ea4c0 10 mon.a@0(leader).paxos(paxos updating c 0..0) commit_start 1
2016-07-18 19:48:50.232072 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(pgmap 0..0) election_finished
2016-07-18 19:48:50.232073 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(pgmap 0..0) _active
2016-07-18 19:48:50.232074 7f0c083ea4c0 7 mon.a@0(leader).paxosservice(pgmap 0..0) _active creating new pending
2016-07-18 19:48:50.232079 7f0c083ea4c0 10 mon.a@0(leader).pg v0 create_pending v 1
2016-07-18 19:48:50.232089 7f0c083ea4c0 10 mon.a@0(leader).pg v0 create_initial -- creating initial map
2016-07-18 19:48:50.232098 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(pgmap 0..0) propose_pending
2016-07-18 19:48:50.232102 7f0c083ea4c0 10 mon.a@0(leader).pg v0 encode_pending v 1
2016-07-18 19:48:50.232118 7f0c083ea4c0 5 mon.a@0(leader).paxos(paxos writing c 0..0) queue_pending_finisher 0xb18c160
2016-07-18 19:48:50.232121 7f0c083ea4c0 10 mon.a@0(leader).paxos(paxos writing c 0..0) trigger_propose not active, will propose later
2016-07-18 19:48:50.232122 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(mdsmap 0..0) election_finished
2016-07-18 19:48:50.232123 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(mdsmap 0..0) _active
2016-07-18 19:48:50.232124 7f0c083ea4c0 7 mon.a@0(leader).paxosservice(mdsmap 0..0) _active creating new pending
2016-07-18 19:48:50.232135 7f0c083ea4c0 10 mon.a@0(leader).mds e0 create_pending e1
2016-07-18 19:48:50.232137 7f0c083ea4c0 10 mon.a@0(leader).mds e0 create_initial
2016-07-18 19:48:50.232138 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(mdsmap 0..0) propose_pending
2016-07-18 19:48:50.232150 7f0c083ea4c0 10 mon.a@0(leader).mds e0 encode_pending e1
2016-07-18 19:48:50.232172 7f0c083ea4c0 5 mon.a@0(leader).paxos(paxos writing c 0..0) queue_pending_finisher 0xb18c240
2016-07-18 19:48:50.232175 7f0c083ea4c0 10 mon.a@0(leader).paxos(paxos writing c 0..0) trigger_propose not active, will propose later
2016-07-18 19:48:50.232177 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(osdmap 0..0) election_finished
2016-07-18 19:48:50.232178 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(osdmap 0..0) _active
2016-07-18 19:48:50.232178 7f0c083ea4c0 7 mon.a@0(leader).paxosservice(osdmap 0..0) _active creating new pending
2016-07-18 19:48:50.232184 7f0c083ea4c0 10 mon.a@0(leader).osd e0 create_pending e 1
2016-07-18 19:48:50.232207 7f0c083ea4c0 10 mon.a@0(leader).osd e0 create_pending did clean_temps
2016-07-18 19:48:50.232212 7f0c083ea4c0 10 mon.a@0(leader).osd e0 create_initial for 414e5204-fcfb-4176-ab89-aa7c684bd782
2016-07-18 19:48:50.232420 7f0c083ea4c0 20 mon.a@0(leader).osd e0 full crc 3623640541
2016-07-18 19:48:50.232430 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(osdmap 0..0) propose_pending
2016-07-18 19:48:50.232433 7f0c083ea4c0 10 mon.a@0(leader).osd e0 encode_pending e 1
2016-07-18 19:48:50.232483 7f0c083ea4c0 20 mon.a@0(leader).osd e0 full_crc 3623640541 inc_crc 1456793561
2016-07-18 19:48:50.232499 7f0c083ea4c0 5 mon.a@0(leader).paxos(paxos writing c 0..0) queue_pending_finisher 0xb18c220
2016-07-18 19:48:50.232502 7f0c083ea4c0 10 mon.a@0(leader).paxos(paxos writing c 0..0) trigger_propose not active, will propose later
2016-07-18 19:48:50.232504 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(logm 0..0) election_finished
2016-07-18 19:48:50.232505 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(logm 0..0) _active
2016-07-18 19:48:50.232505 7f0c083ea4c0 7 mon.a@0(leader).paxosservice(logm 0..0) _active creating new pending
2016-07-18 19:48:50.232508 7f0c083ea4c0 10 mon.a@0(leader).log v0 create_pending v 1
2016-07-18 19:48:50.232509 7f0c083ea4c0 10 mon.a@0(leader).log v0 create_initial -- creating initial map
2016-07-18 19:48:50.232518 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(logm 0..0) propose_pending
2016-07-18 19:48:50.232521 7f0c083ea4c0 10 mon.a@0(leader).log v0 encode_full log v 0
2016-07-18 19:48:50.232526 7f0c083ea4c0 10 mon.a@0(leader).log v0 encode_pending v1
2016-07-18 19:48:50.232540 7f0c083ea4c0 5 mon.a@0(leader).paxos(paxos writing c 0..0) queue_pending_finisher 0xb18c210
2016-07-18 19:48:50.232544 7f0c083ea4c0 10 mon.a@0(leader).paxos(paxos writing c 0..0) trigger_propose not active, will propose later
2016-07-18 19:48:50.232545 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(auth 0..0) election_finished
2016-07-18 19:48:50.232546 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(auth 0..0) _active
2016-07-18 19:48:50.232547 7f0c083ea4c0 7 mon.a@0(leader).paxosservice(auth 0..0) _active creating new pending
2016-07-18 19:48:50.232548 7f0c083ea4c0 10 mon.a@0(leader).auth v0 create_pending v 1
2016-07-18 19:48:50.232551 7f0c083ea4c0 10 mon.a@0(leader).auth v0 create_initial -- creating initial map
2016-07-18 19:48:50.232759 7f0c083ea4c0 10 mon.a@0(leader).auth v0 check_rotate updated rotating
2016-07-18 19:48:50.232771 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(auth 0..0) propose_pending
2016-07-18 19:48:50.232782 7f0c083ea4c0 10 mon.a@0(leader).auth v0 encode_pending v 1
2016-07-18 19:48:50.232800 7f0c083ea4c0 5 mon.a@0(leader).paxos(paxos writing c 0..0) queue_pending_finisher 0xb18c250
2016-07-18 19:48:50.232804 7f0c083ea4c0 10 mon.a@0(leader).paxos(paxos writing c 0..0) trigger_propose not active, will propose later
2016-07-18 19:48:50.232805 7f0c083ea4c0 10 mon.a@0(leader).data_health(2) start_epoch epoch 2
2016-07-18 19:48:50.232811 7f0c083ea4c0 1 mon.a@0(leader) e0 apply_quorum_to_compatset_features enabling new quorum features: compat={},rocompat={},incompat={4=support erasure code pools,5=new-style osdmap encoding,6=support isa/lrc erasure code,7=support shec erasure code}
2016-07-18 19:48:50.233244 7f0c083ea4c0 10 mon.a@0(leader) e0 apply_compatset_features_to_quorum_requirements required_features 9025616074506240
2016-07-18 19:48:50.233253 7f0c083ea4c0 10 mon.a@0(leader) e0 timecheck_finish
2016-07-18 19:48:50.233256 7f0c083ea4c0 10 mon.a@0(leader) e0 resend_routed_requests
2016-07-18 19:48:50.233257 7f0c083ea4c0 10 mon.a@0(leader) e0 register_cluster_logger
2016-07-18 19:48:50.251223 7f0bff949700 10 accepter.accepter starting
2016-07-18 19:48:50.251235 7f0bff949700 20 accepter.accepter calling poll
2016-07-18 19:48:50.251233 7f0c0194d700 10 -- 127.0.0.1:7116/0 reaper_entry start
2016-07-18 19:48:50.251246 7f0c0194d700 10 -- 127.0.0.1:7116/0 reaper
2016-07-18 19:48:50.251248 7f0c0194d700 10 -- 127.0.0.1:7116/0 reaper done
2016-07-18 19:48:50.251258 7f0c0014a700 20 -- 127.0.0.1:7116/0 queue 0xb28cb40 prio 196
2016-07-18 19:48:50.252589 7f0c0094b700 1 -- 127.0.0.1:7116/0 <== mon.0 127.0.0.1:7116/0 0 ==== log(1 entries from seq 1 at 2016-07-18 19:48:50.231541) v1 ==== 0+0+0 (0 0 0) 0xb28cb40 con 0xb1c8800
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:1321: display_logs: read file
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:1458: main: code=1
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:1460: main: teardown testdir/rados-striper
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:118: teardown: local dir=testdir/rados-striper
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:119: teardown: kill_daemons testdir/rados-striper KILL
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:252: kill_daemons: shopt -q -o xtrace
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:252: kill_daemons: echo true
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:252: kill_daemons: local trace=true
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:253: kill_daemons: true
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:253: kill_daemons: shopt -u -o xtrace
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:269: kill_daemons: return 0
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:120: teardown: stat -f -c %T .
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:120: teardown: '[' xfs == btrfs ']'
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:123: teardown: rm -fr testdir/rados-striper
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:1461: main: return 1